1 /* Arm MVE intrinsics include file.
3 Copyright (C) 2019-2023 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #ifndef _GCC_ARM_MVE_H
23 #define _GCC_ARM_MVE_H
26 #error "MVE intrinsics are not supported in Big-Endian mode."
27 #elif !__ARM_FEATURE_MVE
28 #error "MVE feature not supported"
35 #include "arm_mve_types.h"
37 #ifndef __ARM_MVE_PRESERVE_USER_NAMESPACE
38 #define vst4q(__addr, __value) __arm_vst4q(__addr, __value)
39 #define vdupq_n(__a) __arm_vdupq_n(__a)
40 #define vabsq(__a) __arm_vabsq(__a)
41 #define vclsq(__a) __arm_vclsq(__a)
42 #define vclzq(__a) __arm_vclzq(__a)
43 #define vnegq(__a) __arm_vnegq(__a)
44 #define vaddlvq(__a) __arm_vaddlvq(__a)
45 #define vaddvq(__a) __arm_vaddvq(__a)
46 #define vmovlbq(__a) __arm_vmovlbq(__a)
47 #define vmovltq(__a) __arm_vmovltq(__a)
48 #define vmvnq(__a) __arm_vmvnq(__a)
49 #define vrev16q(__a) __arm_vrev16q(__a)
50 #define vrev32q(__a) __arm_vrev32q(__a)
51 #define vrev64q(__a) __arm_vrev64q(__a)
52 #define vqabsq(__a) __arm_vqabsq(__a)
53 #define vqnegq(__a) __arm_vqnegq(__a)
54 #define vshrq(__a, __imm) __arm_vshrq(__a, __imm)
55 #define vaddlvq_p(__a, __p) __arm_vaddlvq_p(__a, __p)
56 #define vcmpneq(__a, __b) __arm_vcmpneq(__a, __b)
57 #define vshlq(__a, __b) __arm_vshlq(__a, __b)
58 #define vsubq(__a, __b) __arm_vsubq(__a, __b)
59 #define vrmulhq(__a, __b) __arm_vrmulhq(__a, __b)
60 #define vrhaddq(__a, __b) __arm_vrhaddq(__a, __b)
61 #define vqsubq(__a, __b) __arm_vqsubq(__a, __b)
62 #define vqaddq(__a, __b) __arm_vqaddq(__a, __b)
63 #define vorrq(__a, __b) __arm_vorrq(__a, __b)
64 #define vornq(__a, __b) __arm_vornq(__a, __b)
65 #define vmulq(__a, __b) __arm_vmulq(__a, __b)
66 #define vmulltq_int(__a, __b) __arm_vmulltq_int(__a, __b)
67 #define vmullbq_int(__a, __b) __arm_vmullbq_int(__a, __b)
68 #define vmulhq(__a, __b) __arm_vmulhq(__a, __b)
69 #define vmladavq(__a, __b) __arm_vmladavq(__a, __b)
70 #define vminvq(__a, __b) __arm_vminvq(__a, __b)
71 #define vminq(__a, __b) __arm_vminq(__a, __b)
72 #define vmaxvq(__a, __b) __arm_vmaxvq(__a, __b)
73 #define vmaxq(__a, __b) __arm_vmaxq(__a, __b)
74 #define vhsubq(__a, __b) __arm_vhsubq(__a, __b)
75 #define vhaddq(__a, __b) __arm_vhaddq(__a, __b)
76 #define veorq(__a, __b) __arm_veorq(__a, __b)
77 #define vcmphiq(__a, __b) __arm_vcmphiq(__a, __b)
78 #define vcmpeqq(__a, __b) __arm_vcmpeqq(__a, __b)
79 #define vcmpcsq(__a, __b) __arm_vcmpcsq(__a, __b)
80 #define vcaddq_rot90(__a, __b) __arm_vcaddq_rot90(__a, __b)
81 #define vcaddq_rot270(__a, __b) __arm_vcaddq_rot270(__a, __b)
82 #define vbicq(__a, __b) __arm_vbicq(__a, __b)
83 #define vandq(__a, __b) __arm_vandq(__a, __b)
84 #define vaddvq_p(__a, __p) __arm_vaddvq_p(__a, __p)
85 #define vaddvaq(__a, __b) __arm_vaddvaq(__a, __b)
/* Binary operations continued: shifts by register/immediate, saturating and
   doubling multiplies, widening multiplies, narrowing moves and long
   multiply-accumulate reductions.  */
#define vaddq(__a, __b) __arm_vaddq(__a, __b)
#define vabdq(__a, __b) __arm_vabdq(__a, __b)
#define vshlq_r(__a, __b) __arm_vshlq_r(__a, __b)
#define vrshlq(__a, __b) __arm_vrshlq(__a, __b)
#define vqshlq(__a, __b) __arm_vqshlq(__a, __b)
#define vqshlq_r(__a, __b) __arm_vqshlq_r(__a, __b)
#define vqrshlq(__a, __b) __arm_vqrshlq(__a, __b)
#define vminavq(__a, __b) __arm_vminavq(__a, __b)
#define vminaq(__a, __b) __arm_vminaq(__a, __b)
#define vmaxavq(__a, __b) __arm_vmaxavq(__a, __b)
#define vmaxaq(__a, __b) __arm_vmaxaq(__a, __b)
#define vbrsrq(__a, __b) __arm_vbrsrq(__a, __b)
#define vshlq_n(__a, __imm) __arm_vshlq_n(__a, __imm)
#define vrshrq(__a, __imm) __arm_vrshrq(__a, __imm)
#define vqshlq_n(__a, __imm) __arm_vqshlq_n(__a, __imm)
#define vcmpltq(__a, __b) __arm_vcmpltq(__a, __b)
#define vcmpleq(__a, __b) __arm_vcmpleq(__a, __b)
#define vcmpgtq(__a, __b) __arm_vcmpgtq(__a, __b)
#define vcmpgeq(__a, __b) __arm_vcmpgeq(__a, __b)
#define vqshluq(__a, __imm) __arm_vqshluq(__a, __imm)
#define vqrdmulhq(__a, __b) __arm_vqrdmulhq(__a, __b)
#define vqdmulhq(__a, __b) __arm_vqdmulhq(__a, __b)
#define vmlsdavxq(__a, __b) __arm_vmlsdavxq(__a, __b)
#define vmlsdavq(__a, __b) __arm_vmlsdavq(__a, __b)
#define vmladavxq(__a, __b) __arm_vmladavxq(__a, __b)
#define vhcaddq_rot90(__a, __b) __arm_vhcaddq_rot90(__a, __b)
#define vhcaddq_rot270(__a, __b) __arm_vhcaddq_rot270(__a, __b)
#define vqmovntq(__a, __b) __arm_vqmovntq(__a, __b)
#define vqmovnbq(__a, __b) __arm_vqmovnbq(__a, __b)
#define vmulltq_poly(__a, __b) __arm_vmulltq_poly(__a, __b)
#define vmullbq_poly(__a, __b) __arm_vmullbq_poly(__a, __b)
#define vmovntq(__a, __b) __arm_vmovntq(__a, __b)
#define vmovnbq(__a, __b) __arm_vmovnbq(__a, __b)
#define vmlaldavq(__a, __b) __arm_vmlaldavq(__a, __b)
#define vqmovuntq(__a, __b) __arm_vqmovuntq(__a, __b)
#define vqmovunbq(__a, __b) __arm_vqmovunbq(__a, __b)
#define vshlltq(__a, __imm) __arm_vshlltq(__a, __imm)
#define vshllbq(__a, __imm) __arm_vshllbq(__a, __imm)
#define vqdmulltq(__a, __b) __arm_vqdmulltq(__a, __b)
#define vqdmullbq(__a, __b) __arm_vqdmullbq(__a, __b)
#define vmlsldavxq(__a, __b) __arm_vmlsldavxq(__a, __b)
#define vmlsldavq(__a, __b) __arm_vmlsldavq(__a, __b)
#define vmlaldavxq(__a, __b) __arm_vmlaldavxq(__a, __b)
#define vrmlaldavhq(__a, __b) __arm_vrmlaldavhq(__a, __b)
#define vaddlvaq(__a, __b) __arm_vaddlvaq(__a, __b)
#define vrmlsldavhxq(__a, __b) __arm_vrmlsldavhxq(__a, __b)
#define vrmlsldavhq(__a, __b) __arm_vrmlsldavhq(__a, __b)
#define vrmlaldavhxq(__a, __b) __arm_vrmlaldavhxq(__a, __b)
#define vabavq(__a, __b, __c) __arm_vabavq(__a, __b, __c)
/* Ternary and predicated operations: _m variants take a predicate (and
   usually an inactive-lanes source), _p variants predicate a reduction.  */
#define vbicq_m_n(__a, __imm, __p) __arm_vbicq_m_n(__a, __imm, __p)
#define vqrshrnbq(__a, __b, __imm) __arm_vqrshrnbq(__a, __b, __imm)
#define vqrshrunbq(__a, __b, __imm) __arm_vqrshrunbq(__a, __b, __imm)
#define vrmlaldavhaq(__a, __b, __c) __arm_vrmlaldavhaq(__a, __b, __c)
#define vshlcq(__a, __b, __imm) __arm_vshlcq(__a, __b, __imm)
#define vpselq(__a, __b, __p) __arm_vpselq(__a, __b, __p)
#define vrev64q_m(__inactive, __a, __p) __arm_vrev64q_m(__inactive, __a, __p)
#define vqrdmlashq(__a, __b, __c) __arm_vqrdmlashq(__a, __b, __c)
#define vqrdmlahq(__a, __b, __c) __arm_vqrdmlahq(__a, __b, __c)
#define vqdmlashq(__a, __b, __c) __arm_vqdmlashq(__a, __b, __c)
#define vqdmlahq(__a, __b, __c) __arm_vqdmlahq(__a, __b, __c)
#define vmvnq_m(__inactive, __a, __p) __arm_vmvnq_m(__inactive, __a, __p)
#define vmlasq(__a, __b, __c) __arm_vmlasq(__a, __b, __c)
#define vmlaq(__a, __b, __c) __arm_vmlaq(__a, __b, __c)
#define vmladavq_p(__a, __b, __p) __arm_vmladavq_p(__a, __b, __p)
#define vmladavaq(__a, __b, __c) __arm_vmladavaq(__a, __b, __c)
#define vminvq_p(__a, __b, __p) __arm_vminvq_p(__a, __b, __p)
#define vmaxvq_p(__a, __b, __p) __arm_vmaxvq_p(__a, __b, __p)
#define vdupq_m(__inactive, __a, __p) __arm_vdupq_m(__inactive, __a, __p)
#define vcmpneq_m(__a, __b, __p) __arm_vcmpneq_m(__a, __b, __p)
#define vcmphiq_m(__a, __b, __p) __arm_vcmphiq_m(__a, __b, __p)
#define vcmpeqq_m(__a, __b, __p) __arm_vcmpeqq_m(__a, __b, __p)
#define vcmpcsq_m(__a, __b, __p) __arm_vcmpcsq_m(__a, __b, __p)
#define vcmpcsq_m_n(__a, __b, __p) __arm_vcmpcsq_m_n(__a, __b, __p)
#define vclzq_m(__inactive, __a, __p) __arm_vclzq_m(__inactive, __a, __p)
#define vaddvaq_p(__a, __b, __p) __arm_vaddvaq_p(__a, __b, __p)
#define vsriq(__a, __b, __imm) __arm_vsriq(__a, __b, __imm)
#define vsliq(__a, __b, __imm) __arm_vsliq(__a, __b, __imm)
#define vshlq_m_r(__a, __b, __p) __arm_vshlq_m_r(__a, __b, __p)
#define vrshlq_m_n(__a, __b, __p) __arm_vrshlq_m_n(__a, __b, __p)
#define vqshlq_m_r(__a, __b, __p) __arm_vqshlq_m_r(__a, __b, __p)
#define vqrshlq_m_n(__a, __b, __p) __arm_vqrshlq_m_n(__a, __b, __p)
#define vminavq_p(__a, __b, __p) __arm_vminavq_p(__a, __b, __p)
#define vminaq_m(__a, __b, __p) __arm_vminaq_m(__a, __b, __p)
#define vmaxavq_p(__a, __b, __p) __arm_vmaxavq_p(__a, __b, __p)
#define vmaxaq_m(__a, __b, __p) __arm_vmaxaq_m(__a, __b, __p)
#define vcmpltq_m(__a, __b, __p) __arm_vcmpltq_m(__a, __b, __p)
#define vcmpleq_m(__a, __b, __p) __arm_vcmpleq_m(__a, __b, __p)
#define vcmpgtq_m(__a, __b, __p) __arm_vcmpgtq_m(__a, __b, __p)
#define vcmpgeq_m(__a, __b, __p) __arm_vcmpgeq_m(__a, __b, __p)
#define vqnegq_m(__inactive, __a, __p) __arm_vqnegq_m(__inactive, __a, __p)
#define vqabsq_m(__inactive, __a, __p) __arm_vqabsq_m(__inactive, __a, __p)
#define vnegq_m(__inactive, __a, __p) __arm_vnegq_m(__inactive, __a, __p)
#define vmlsdavxq_p(__a, __b, __p) __arm_vmlsdavxq_p(__a, __b, __p)
#define vmlsdavq_p(__a, __b, __p) __arm_vmlsdavq_p(__a, __b, __p)
#define vmladavxq_p(__a, __b, __p) __arm_vmladavxq_p(__a, __b, __p)
#define vclsq_m(__inactive, __a, __p) __arm_vclsq_m(__inactive, __a, __p)
#define vabsq_m(__inactive, __a, __p) __arm_vabsq_m(__inactive, __a, __p)
#define vqrdmlsdhxq(__inactive, __a, __b) __arm_vqrdmlsdhxq(__inactive, __a, __b)
#define vqrdmlsdhq(__inactive, __a, __b) __arm_vqrdmlsdhq(__inactive, __a, __b)
#define vqrdmladhxq(__inactive, __a, __b) __arm_vqrdmladhxq(__inactive, __a, __b)
#define vqrdmladhq(__inactive, __a, __b) __arm_vqrdmladhq(__inactive, __a, __b)
#define vqdmlsdhxq(__inactive, __a, __b) __arm_vqdmlsdhxq(__inactive, __a, __b)
#define vqdmlsdhq(__inactive, __a, __b) __arm_vqdmlsdhq(__inactive, __a, __b)
#define vqdmladhxq(__inactive, __a, __b) __arm_vqdmladhxq(__inactive, __a, __b)
#define vqdmladhq(__inactive, __a, __b) __arm_vqdmladhq(__inactive, __a, __b)
#define vmlsdavaxq(__a, __b, __c) __arm_vmlsdavaxq(__a, __b, __c)
#define vmlsdavaq(__a, __b, __c) __arm_vmlsdavaq(__a, __b, __c)
#define vmladavaxq(__a, __b, __c) __arm_vmladavaxq(__a, __b, __c)
#define vrmlaldavhaxq(__a, __b, __c) __arm_vrmlaldavhaxq(__a, __b, __c)
#define vrmlsldavhaq(__a, __b, __c) __arm_vrmlsldavhaq(__a, __b, __c)
#define vrmlsldavhaxq(__a, __b, __c) __arm_vrmlsldavhaxq(__a, __b, __c)
#define vaddlvaq_p(__a, __b, __p) __arm_vaddlvaq_p(__a, __b, __p)
#define vrev16q_m(__inactive, __a, __p) __arm_vrev16q_m(__inactive, __a, __p)
/* Predicated long reductions, narrowing shift-rights and widening moves.  */
#define vrmlaldavhq_p(__a, __b, __p) __arm_vrmlaldavhq_p(__a, __b, __p)
#define vrmlaldavhxq_p(__a, __b, __p) __arm_vrmlaldavhxq_p(__a, __b, __p)
#define vrmlsldavhq_p(__a, __b, __p) __arm_vrmlsldavhq_p(__a, __b, __p)
#define vrmlsldavhxq_p(__a, __b, __p) __arm_vrmlsldavhxq_p(__a, __b, __p)
#define vorrq_m_n(__a, __imm, __p) __arm_vorrq_m_n(__a, __imm, __p)
#define vqrshrntq(__a, __b, __imm) __arm_vqrshrntq(__a, __b, __imm)
#define vqshrnbq(__a, __b, __imm) __arm_vqshrnbq(__a, __b, __imm)
#define vqshrntq(__a, __b, __imm) __arm_vqshrntq(__a, __b, __imm)
#define vrshrnbq(__a, __b, __imm) __arm_vrshrnbq(__a, __b, __imm)
#define vrshrntq(__a, __b, __imm) __arm_vrshrntq(__a, __b, __imm)
#define vshrnbq(__a, __b, __imm) __arm_vshrnbq(__a, __b, __imm)
#define vshrntq(__a, __b, __imm) __arm_vshrntq(__a, __b, __imm)
#define vmlaldavaq(__a, __b, __c) __arm_vmlaldavaq(__a, __b, __c)
#define vmlaldavaxq(__a, __b, __c) __arm_vmlaldavaxq(__a, __b, __c)
#define vmlsldavaq(__a, __b, __c) __arm_vmlsldavaq(__a, __b, __c)
#define vmlsldavaxq(__a, __b, __c) __arm_vmlsldavaxq(__a, __b, __c)
#define vmlaldavq_p(__a, __b, __p) __arm_vmlaldavq_p(__a, __b, __p)
#define vmlaldavxq_p(__a, __b, __p) __arm_vmlaldavxq_p(__a, __b, __p)
#define vmlsldavq_p(__a, __b, __p) __arm_vmlsldavq_p(__a, __b, __p)
#define vmlsldavxq_p(__a, __b, __p) __arm_vmlsldavxq_p(__a, __b, __p)
#define vmovlbq_m(__inactive, __a, __p) __arm_vmovlbq_m(__inactive, __a, __p)
#define vmovltq_m(__inactive, __a, __p) __arm_vmovltq_m(__inactive, __a, __p)
#define vmovnbq_m(__a, __b, __p) __arm_vmovnbq_m(__a, __b, __p)
#define vmovntq_m(__a, __b, __p) __arm_vmovntq_m(__a, __b, __p)
#define vqmovnbq_m(__a, __b, __p) __arm_vqmovnbq_m(__a, __b, __p)
#define vqmovntq_m(__a, __b, __p) __arm_vqmovntq_m(__a, __b, __p)
#define vrev32q_m(__inactive, __a, __p) __arm_vrev32q_m(__inactive, __a, __p)
#define vqrshruntq(__a, __b, __imm) __arm_vqrshruntq(__a, __b, __imm)
#define vqshrunbq(__a, __b, __imm) __arm_vqshrunbq(__a, __b, __imm)
#define vqshruntq(__a, __b, __imm) __arm_vqshruntq(__a, __b, __imm)
#define vqmovunbq_m(__a, __b, __p) __arm_vqmovunbq_m(__a, __b, __p)
#define vqmovuntq_m(__a, __b, __p) __arm_vqmovuntq_m(__a, __b, __p)
#define vsriq_m(__a, __b, __imm, __p) __arm_vsriq_m(__a, __b, __imm, __p)
#define vsubq_m(__inactive, __a, __b, __p) __arm_vsubq_m(__inactive, __a, __b, __p)
#define vqshluq_m(__inactive, __a, __imm, __p) __arm_vqshluq_m(__inactive, __a, __imm, __p)
/* Four-argument merging (_m) and predicated-accumulate (_p) variants.  */
#define vabavq_p(__a, __b, __c, __p) __arm_vabavq_p(__a, __b, __c, __p)
#define vshlq_m(__inactive, __a, __b, __p) __arm_vshlq_m(__inactive, __a, __b, __p)
#define vabdq_m(__inactive, __a, __b, __p) __arm_vabdq_m(__inactive, __a, __b, __p)
#define vaddq_m(__inactive, __a, __b, __p) __arm_vaddq_m(__inactive, __a, __b, __p)
#define vandq_m(__inactive, __a, __b, __p) __arm_vandq_m(__inactive, __a, __b, __p)
#define vbicq_m(__inactive, __a, __b, __p) __arm_vbicq_m(__inactive, __a, __b, __p)
#define vbrsrq_m(__inactive, __a, __b, __p) __arm_vbrsrq_m(__inactive, __a, __b, __p)
#define vcaddq_rot270_m(__inactive, __a, __b, __p) __arm_vcaddq_rot270_m(__inactive, __a, __b, __p)
#define vcaddq_rot90_m(__inactive, __a, __b, __p) __arm_vcaddq_rot90_m(__inactive, __a, __b, __p)
#define veorq_m(__inactive, __a, __b, __p) __arm_veorq_m(__inactive, __a, __b, __p)
#define vhaddq_m(__inactive, __a, __b, __p) __arm_vhaddq_m(__inactive, __a, __b, __p)
#define vhcaddq_rot270_m(__inactive, __a, __b, __p) __arm_vhcaddq_rot270_m(__inactive, __a, __b, __p)
#define vhcaddq_rot90_m(__inactive, __a, __b, __p) __arm_vhcaddq_rot90_m(__inactive, __a, __b, __p)
#define vhsubq_m(__inactive, __a, __b, __p) __arm_vhsubq_m(__inactive, __a, __b, __p)
#define vmaxq_m(__inactive, __a, __b, __p) __arm_vmaxq_m(__inactive, __a, __b, __p)
#define vminq_m(__inactive, __a, __b, __p) __arm_vminq_m(__inactive, __a, __b, __p)
#define vmladavaq_p(__a, __b, __c, __p) __arm_vmladavaq_p(__a, __b, __c, __p)
#define vmladavaxq_p(__a, __b, __c, __p) __arm_vmladavaxq_p(__a, __b, __c, __p)
#define vmlaq_m(__a, __b, __c, __p) __arm_vmlaq_m(__a, __b, __c, __p)
#define vmlasq_m(__a, __b, __c, __p) __arm_vmlasq_m(__a, __b, __c, __p)
#define vmlsdavaq_p(__a, __b, __c, __p) __arm_vmlsdavaq_p(__a, __b, __c, __p)
#define vmlsdavaxq_p(__a, __b, __c, __p) __arm_vmlsdavaxq_p(__a, __b, __c, __p)
#define vmulhq_m(__inactive, __a, __b, __p) __arm_vmulhq_m(__inactive, __a, __b, __p)
#define vmullbq_int_m(__inactive, __a, __b, __p) __arm_vmullbq_int_m(__inactive, __a, __b, __p)
#define vmulltq_int_m(__inactive, __a, __b, __p) __arm_vmulltq_int_m(__inactive, __a, __b, __p)
#define vmulq_m(__inactive, __a, __b, __p) __arm_vmulq_m(__inactive, __a, __b, __p)
#define vornq_m(__inactive, __a, __b, __p) __arm_vornq_m(__inactive, __a, __b, __p)
#define vorrq_m(__inactive, __a, __b, __p) __arm_vorrq_m(__inactive, __a, __b, __p)
#define vqaddq_m(__inactive, __a, __b, __p) __arm_vqaddq_m(__inactive, __a, __b, __p)
#define vqdmladhq_m(__inactive, __a, __b, __p) __arm_vqdmladhq_m(__inactive, __a, __b, __p)
#define vqdmlashq_m(__a, __b, __c, __p) __arm_vqdmlashq_m(__a, __b, __c, __p)
#define vqdmladhxq_m(__inactive, __a, __b, __p) __arm_vqdmladhxq_m(__inactive, __a, __b, __p)
#define vqdmlahq_m(__a, __b, __c, __p) __arm_vqdmlahq_m(__a, __b, __c, __p)
#define vqdmlsdhq_m(__inactive, __a, __b, __p) __arm_vqdmlsdhq_m(__inactive, __a, __b, __p)
#define vqdmlsdhxq_m(__inactive, __a, __b, __p) __arm_vqdmlsdhxq_m(__inactive, __a, __b, __p)
#define vqdmulhq_m(__inactive, __a, __b, __p) __arm_vqdmulhq_m(__inactive, __a, __b, __p)
#define vqrdmladhq_m(__inactive, __a, __b, __p) __arm_vqrdmladhq_m(__inactive, __a, __b, __p)
#define vqrdmladhxq_m(__inactive, __a, __b, __p) __arm_vqrdmladhxq_m(__inactive, __a, __b, __p)
#define vqrdmlahq_m(__a, __b, __c, __p) __arm_vqrdmlahq_m(__a, __b, __c, __p)
#define vqrdmlashq_m(__a, __b, __c, __p) __arm_vqrdmlashq_m(__a, __b, __c, __p)
#define vqrdmlsdhq_m(__inactive, __a, __b, __p) __arm_vqrdmlsdhq_m(__inactive, __a, __b, __p)
#define vqrdmlsdhxq_m(__inactive, __a, __b, __p) __arm_vqrdmlsdhxq_m(__inactive, __a, __b, __p)
#define vqrdmulhq_m(__inactive, __a, __b, __p) __arm_vqrdmulhq_m(__inactive, __a, __b, __p)
#define vqrshlq_m(__inactive, __a, __b, __p) __arm_vqrshlq_m(__inactive, __a, __b, __p)
#define vqshlq_m_n(__inactive, __a, __imm, __p) __arm_vqshlq_m_n(__inactive, __a, __imm, __p)
#define vqshlq_m(__inactive, __a, __b, __p) __arm_vqshlq_m(__inactive, __a, __b, __p)
#define vqsubq_m(__inactive, __a, __b, __p) __arm_vqsubq_m(__inactive, __a, __b, __p)
#define vrhaddq_m(__inactive, __a, __b, __p) __arm_vrhaddq_m(__inactive, __a, __b, __p)
#define vrmulhq_m(__inactive, __a, __b, __p) __arm_vrmulhq_m(__inactive, __a, __b, __p)
#define vrshlq_m(__inactive, __a, __b, __p) __arm_vrshlq_m(__inactive, __a, __b, __p)
#define vrshrq_m(__inactive, __a, __imm, __p) __arm_vrshrq_m(__inactive, __a, __imm, __p)
#define vshlq_m_n(__inactive, __a, __imm, __p) __arm_vshlq_m_n(__inactive, __a, __imm, __p)
#define vshrq_m(__inactive, __a, __imm, __p) __arm_vshrq_m(__inactive, __a, __imm, __p)
#define vsliq_m(__a, __b, __imm, __p) __arm_vsliq_m(__a, __b, __imm, __p)
#define vmlaldavaq_p(__a, __b, __c, __p) __arm_vmlaldavaq_p(__a, __b, __c, __p)
#define vmlaldavaxq_p(__a, __b, __c, __p) __arm_vmlaldavaxq_p(__a, __b, __c, __p)
#define vmlsldavaq_p(__a, __b, __c, __p) __arm_vmlsldavaq_p(__a, __b, __c, __p)
#define vmlsldavaxq_p(__a, __b, __c, __p) __arm_vmlsldavaxq_p(__a, __b, __c, __p)
#define vmullbq_poly_m(__inactive, __a, __b, __p) __arm_vmullbq_poly_m(__inactive, __a, __b, __p)
#define vmulltq_poly_m(__inactive, __a, __b, __p) __arm_vmulltq_poly_m(__inactive, __a, __b, __p)
#define vqdmullbq_m(__inactive, __a, __b, __p) __arm_vqdmullbq_m(__inactive, __a, __b, __p)
#define vqdmulltq_m(__inactive, __a, __b, __p) __arm_vqdmulltq_m(__inactive, __a, __b, __p)
#define vqrshrnbq_m(__a, __b, __imm, __p) __arm_vqrshrnbq_m(__a, __b, __imm, __p)
#define vqrshrntq_m(__a, __b, __imm, __p) __arm_vqrshrntq_m(__a, __b, __imm, __p)
#define vqrshrunbq_m(__a, __b, __imm, __p) __arm_vqrshrunbq_m(__a, __b, __imm, __p)
#define vqrshruntq_m(__a, __b, __imm, __p) __arm_vqrshruntq_m(__a, __b, __imm, __p)
#define vqshrnbq_m(__a, __b, __imm, __p) __arm_vqshrnbq_m(__a, __b, __imm, __p)
#define vqshrntq_m(__a, __b, __imm, __p) __arm_vqshrntq_m(__a, __b, __imm, __p)
#define vqshrunbq_m(__a, __b, __imm, __p) __arm_vqshrunbq_m(__a, __b, __imm, __p)
#define vqshruntq_m(__a, __b, __imm, __p) __arm_vqshruntq_m(__a, __b, __imm, __p)
#define vrmlaldavhaq_p(__a, __b, __c, __p) __arm_vrmlaldavhaq_p(__a, __b, __c, __p)
#define vrmlaldavhaxq_p(__a, __b, __c, __p) __arm_vrmlaldavhaxq_p(__a, __b, __c, __p)
#define vrmlsldavhaq_p(__a, __b, __c, __p) __arm_vrmlsldavhaq_p(__a, __b, __c, __p)
#define vrmlsldavhaxq_p(__a, __b, __c, __p) __arm_vrmlsldavhaxq_p(__a, __b, __c, __p)
#define vrshrnbq_m(__a, __b, __imm, __p) __arm_vrshrnbq_m(__a, __b, __imm, __p)
#define vrshrntq_m(__a, __b, __imm, __p) __arm_vrshrntq_m(__a, __b, __imm, __p)
#define vshllbq_m(__inactive, __a, __imm, __p) __arm_vshllbq_m(__inactive, __a, __imm, __p)
#define vshlltq_m(__inactive, __a, __imm, __p) __arm_vshlltq_m(__inactive, __a, __imm, __p)
#define vshrnbq_m(__a, __b, __imm, __p) __arm_vshrnbq_m(__a, __b, __imm, __p)
#define vshrntq_m(__a, __b, __imm, __p) __arm_vshrntq_m(__a, __b, __imm, __p)
/* Contiguous and scatter/gather loads and stores; _p/_z variants are
   predicated (stores skip inactive lanes, loads zero them).  */
#define vstrbq_scatter_offset(__base, __offset, __value) __arm_vstrbq_scatter_offset(__base, __offset, __value)
#define vstrbq(__addr, __value) __arm_vstrbq(__addr, __value)
#define vstrwq_scatter_base(__addr, __offset, __value) __arm_vstrwq_scatter_base(__addr, __offset, __value)
#define vldrbq_gather_offset(__base, __offset) __arm_vldrbq_gather_offset(__base, __offset)
#define vstrbq_p(__addr, __value, __p) __arm_vstrbq_p(__addr, __value, __p)
#define vstrbq_scatter_offset_p(__base, __offset, __value, __p) __arm_vstrbq_scatter_offset_p(__base, __offset, __value, __p)
#define vstrwq_scatter_base_p(__addr, __offset, __value, __p) __arm_vstrwq_scatter_base_p(__addr, __offset, __value, __p)
#define vldrbq_gather_offset_z(__base, __offset, __p) __arm_vldrbq_gather_offset_z(__base, __offset, __p)
#define vld1q(__base) __arm_vld1q(__base)
#define vldrhq_gather_offset(__base, __offset) __arm_vldrhq_gather_offset(__base, __offset)
#define vldrhq_gather_offset_z(__base, __offset, __p) __arm_vldrhq_gather_offset_z(__base, __offset, __p)
#define vldrhq_gather_shifted_offset(__base, __offset) __arm_vldrhq_gather_shifted_offset(__base, __offset)
#define vldrhq_gather_shifted_offset_z(__base, __offset, __p) __arm_vldrhq_gather_shifted_offset_z(__base, __offset, __p)
#define vldrdq_gather_offset(__base, __offset) __arm_vldrdq_gather_offset(__base, __offset)
#define vldrdq_gather_offset_z(__base, __offset, __p) __arm_vldrdq_gather_offset_z(__base, __offset, __p)
#define vldrdq_gather_shifted_offset(__base, __offset) __arm_vldrdq_gather_shifted_offset(__base, __offset)
#define vldrdq_gather_shifted_offset_z(__base, __offset, __p) __arm_vldrdq_gather_shifted_offset_z(__base, __offset, __p)
#define vldrwq_gather_offset(__base, __offset) __arm_vldrwq_gather_offset(__base, __offset)
#define vldrwq_gather_offset_z(__base, __offset, __p) __arm_vldrwq_gather_offset_z(__base, __offset, __p)
#define vldrwq_gather_shifted_offset(__base, __offset) __arm_vldrwq_gather_shifted_offset(__base, __offset)
#define vldrwq_gather_shifted_offset_z(__base, __offset, __p) __arm_vldrwq_gather_shifted_offset_z(__base, __offset, __p)
#define vst1q(__addr, __value) __arm_vst1q(__addr, __value)
#define vstrhq_scatter_offset(__base, __offset, __value) __arm_vstrhq_scatter_offset(__base, __offset, __value)
#define vstrhq_scatter_offset_p(__base, __offset, __value, __p) __arm_vstrhq_scatter_offset_p(__base, __offset, __value, __p)
#define vstrhq_scatter_shifted_offset(__base, __offset, __value) __arm_vstrhq_scatter_shifted_offset(__base, __offset, __value)
#define vstrhq_scatter_shifted_offset_p(__base, __offset, __value, __p) __arm_vstrhq_scatter_shifted_offset_p(__base, __offset, __value, __p)
#define vstrhq(__addr, __value) __arm_vstrhq(__addr, __value)
#define vstrhq_p(__addr, __value, __p) __arm_vstrhq_p(__addr, __value, __p)
#define vstrwq(__addr, __value) __arm_vstrwq(__addr, __value)
#define vstrwq_p(__addr, __value, __p) __arm_vstrwq_p(__addr, __value, __p)
#define vstrdq_scatter_base_p(__addr, __offset, __value, __p) __arm_vstrdq_scatter_base_p(__addr, __offset, __value, __p)
#define vstrdq_scatter_base(__addr, __offset, __value) __arm_vstrdq_scatter_base(__addr, __offset, __value)
#define vstrdq_scatter_offset_p(__base, __offset, __value, __p) __arm_vstrdq_scatter_offset_p(__base, __offset, __value, __p)
#define vstrdq_scatter_offset(__base, __offset, __value) __arm_vstrdq_scatter_offset(__base, __offset, __value)
#define vstrdq_scatter_shifted_offset_p(__base, __offset, __value, __p) __arm_vstrdq_scatter_shifted_offset_p(__base, __offset, __value, __p)
#define vstrdq_scatter_shifted_offset(__base, __offset, __value) __arm_vstrdq_scatter_shifted_offset(__base, __offset, __value)
#define vstrwq_scatter_offset_p(__base, __offset, __value, __p) __arm_vstrwq_scatter_offset_p(__base, __offset, __value, __p)
#define vstrwq_scatter_offset(__base, __offset, __value) __arm_vstrwq_scatter_offset(__base, __offset, __value)
#define vstrwq_scatter_shifted_offset_p(__base, __offset, __value, __p) __arm_vstrwq_scatter_shifted_offset_p(__base, __offset, __value, __p)
#define vstrwq_scatter_shifted_offset(__base, __offset, __value) __arm_vstrwq_scatter_shifted_offset(__base, __offset, __value)
/* Uninitialized-vector helper and bit-pattern reinterpret casts.  */
#define vuninitializedq(__v) __arm_vuninitializedq(__v)
#define vreinterpretq_s16(__a) __arm_vreinterpretq_s16(__a)
#define vreinterpretq_s32(__a) __arm_vreinterpretq_s32(__a)
#define vreinterpretq_s64(__a) __arm_vreinterpretq_s64(__a)
#define vreinterpretq_s8(__a) __arm_vreinterpretq_s8(__a)
#define vreinterpretq_u16(__a) __arm_vreinterpretq_u16(__a)
#define vreinterpretq_u32(__a) __arm_vreinterpretq_u32(__a)
#define vreinterpretq_u64(__a) __arm_vreinterpretq_u64(__a)
#define vreinterpretq_u8(__a) __arm_vreinterpretq_u8(__a)
/* Incrementing/decrementing (optionally wrapping) duplicate sequence
   generators: vddupq/vidupq count down/up, vdwdupq/viwdupq wrap at __b.  */
#define vddupq_m(__inactive, __a, __imm, __p) __arm_vddupq_m(__inactive, __a, __imm, __p)
#define vddupq_u8(__a, __imm) __arm_vddupq_u8(__a, __imm)
#define vddupq_u32(__a, __imm) __arm_vddupq_u32(__a, __imm)
#define vddupq_u16(__a, __imm) __arm_vddupq_u16(__a, __imm)
#define vdwdupq_m(__inactive, __a, __b, __imm, __p) __arm_vdwdupq_m(__inactive, __a, __b, __imm, __p)
#define vdwdupq_u8(__a, __b, __imm) __arm_vdwdupq_u8(__a, __b, __imm)
#define vdwdupq_u32(__a, __b, __imm) __arm_vdwdupq_u32(__a, __b, __imm)
#define vdwdupq_u16(__a, __b, __imm) __arm_vdwdupq_u16(__a, __b, __imm)
#define vidupq_m(__inactive, __a, __imm, __p) __arm_vidupq_m(__inactive, __a, __imm, __p)
#define vidupq_u8(__a, __imm) __arm_vidupq_u8(__a, __imm)
#define vidupq_u32(__a, __imm) __arm_vidupq_u32(__a, __imm)
#define vidupq_u16(__a, __imm) __arm_vidupq_u16(__a, __imm)
#define viwdupq_m(__inactive, __a, __b, __imm, __p) __arm_viwdupq_m(__inactive, __a, __b, __imm, __p)
#define viwdupq_u8(__a, __b, __imm) __arm_viwdupq_u8(__a, __b, __imm)
#define viwdupq_u32(__a, __b, __imm) __arm_viwdupq_u32(__a, __b, __imm)
#define viwdupq_u16(__a, __b, __imm) __arm_viwdupq_u16(__a, __b, __imm)
#define vstrdq_scatter_base_wb(__addr, __offset, __value) __arm_vstrdq_scatter_base_wb(__addr, __offset, __value)
#define vstrdq_scatter_base_wb_p(__addr, __offset, __value, __p) __arm_vstrdq_scatter_base_wb_p(__addr, __offset, __value, __p)
#define vstrwq_scatter_base_wb_p(__addr, __offset, __value, __p) __arm_vstrwq_scatter_base_wb_p(__addr, __offset, __value, __p)
#define vstrwq_scatter_base_wb(__addr, __offset, __value) __arm_vstrwq_scatter_base_wb(__addr, __offset, __value)
#define vddupq_x_u8(__a, __imm, __p) __arm_vddupq_x_u8(__a, __imm, __p)
#define vddupq_x_u16(__a, __imm, __p) __arm_vddupq_x_u16(__a, __imm, __p)
#define vddupq_x_u32(__a, __imm, __p) __arm_vddupq_x_u32(__a, __imm, __p)
#define vdwdupq_x_u8(__a, __b, __imm, __p) __arm_vdwdupq_x_u8(__a, __b, __imm, __p)
#define vdwdupq_x_u16(__a, __b, __imm, __p) __arm_vdwdupq_x_u16(__a, __b, __imm, __p)
#define vdwdupq_x_u32(__a, __b, __imm, __p) __arm_vdwdupq_x_u32(__a, __b, __imm, __p)
#define vidupq_x_u8(__a, __imm, __p) __arm_vidupq_x_u8(__a, __imm, __p)
#define vidupq_x_u16(__a, __imm, __p) __arm_vidupq_x_u16(__a, __imm, __p)
#define vidupq_x_u32(__a, __imm, __p) __arm_vidupq_x_u32(__a, __imm, __p)
#define viwdupq_x_u8(__a, __b, __imm, __p) __arm_viwdupq_x_u8(__a, __b, __imm, __p)
#define viwdupq_x_u16(__a, __b, __imm, __p) __arm_viwdupq_x_u16(__a, __b, __imm, __p)
#define viwdupq_x_u32(__a, __b, __imm, __p) __arm_viwdupq_x_u32(__a, __b, __imm, __p)
/* _x variants: predicated operations whose inactive lanes are don't-care
   (no inactive/merge source argument).  */
#define vminq_x(__a, __b, __p) __arm_vminq_x(__a, __b, __p)
#define vmaxq_x(__a, __b, __p) __arm_vmaxq_x(__a, __b, __p)
#define vabdq_x(__a, __b, __p) __arm_vabdq_x(__a, __b, __p)
#define vabsq_x(__a, __p) __arm_vabsq_x(__a, __p)
#define vaddq_x(__a, __b, __p) __arm_vaddq_x(__a, __b, __p)
#define vclsq_x(__a, __p) __arm_vclsq_x(__a, __p)
#define vclzq_x(__a, __p) __arm_vclzq_x(__a, __p)
#define vnegq_x(__a, __p) __arm_vnegq_x(__a, __p)
#define vmulhq_x(__a, __b, __p) __arm_vmulhq_x(__a, __b, __p)
#define vmullbq_poly_x(__a, __b, __p) __arm_vmullbq_poly_x(__a, __b, __p)
#define vmullbq_int_x(__a, __b, __p) __arm_vmullbq_int_x(__a, __b, __p)
#define vmulltq_poly_x(__a, __b, __p) __arm_vmulltq_poly_x(__a, __b, __p)
#define vmulltq_int_x(__a, __b, __p) __arm_vmulltq_int_x(__a, __b, __p)
#define vmulq_x(__a, __b, __p) __arm_vmulq_x(__a, __b, __p)
#define vsubq_x(__a, __b, __p) __arm_vsubq_x(__a, __b, __p)
#define vcaddq_rot90_x(__a, __b, __p) __arm_vcaddq_rot90_x(__a, __b, __p)
#define vcaddq_rot270_x(__a, __b, __p) __arm_vcaddq_rot270_x(__a, __b, __p)
#define vhaddq_x(__a, __b, __p) __arm_vhaddq_x(__a, __b, __p)
#define vhcaddq_rot90_x(__a, __b, __p) __arm_vhcaddq_rot90_x(__a, __b, __p)
#define vhcaddq_rot270_x(__a, __b, __p) __arm_vhcaddq_rot270_x(__a, __b, __p)
#define vhsubq_x(__a, __b, __p) __arm_vhsubq_x(__a, __b, __p)
#define vrhaddq_x(__a, __b, __p) __arm_vrhaddq_x(__a, __b, __p)
#define vrmulhq_x(__a, __b, __p) __arm_vrmulhq_x(__a, __b, __p)
#define vandq_x(__a, __b, __p) __arm_vandq_x(__a, __b, __p)
#define vbicq_x(__a, __b, __p) __arm_vbicq_x(__a, __b, __p)
#define vbrsrq_x(__a, __b, __p) __arm_vbrsrq_x(__a, __b, __p)
#define veorq_x(__a, __b, __p) __arm_veorq_x(__a, __b, __p)
#define vmovlbq_x(__a, __p) __arm_vmovlbq_x(__a, __p)
#define vmovltq_x(__a, __p) __arm_vmovltq_x(__a, __p)
#define vmvnq_x(__a, __p) __arm_vmvnq_x(__a, __p)
#define vornq_x(__a, __b, __p) __arm_vornq_x(__a, __b, __p)
#define vorrq_x(__a, __b, __p) __arm_vorrq_x(__a, __b, __p)
#define vrev16q_x(__a, __p) __arm_vrev16q_x(__a, __p)
#define vrev32q_x(__a, __p) __arm_vrev32q_x(__a, __p)
#define vrev64q_x(__a, __p) __arm_vrev64q_x(__a, __p)
#define vrshlq_x(__a, __b, __p) __arm_vrshlq_x(__a, __b, __p)
#define vshllbq_x(__a, __imm, __p) __arm_vshllbq_x(__a, __imm, __p)
#define vshlltq_x(__a, __imm, __p) __arm_vshlltq_x(__a, __imm, __p)
#define vshlq_x(__a, __b, __p) __arm_vshlq_x(__a, __b, __p)
#define vshlq_x_n(__a, __imm, __p) __arm_vshlq_x_n(__a, __imm, __p)
#define vrshrq_x(__a, __imm, __p) __arm_vrshrq_x(__a, __imm, __p)
#define vshrq_x(__a, __imm, __p) __arm_vshrq_x(__a, __imm, __p)
/* Add/subtract with carry in/out, multi-vector loads/stores and lane
   access.  __carry/__carry_out are passed by address and updated.  */
#define vadciq(__a, __b, __carry_out) __arm_vadciq(__a, __b, __carry_out)
#define vadciq_m(__inactive, __a, __b, __carry_out, __p) __arm_vadciq_m(__inactive, __a, __b, __carry_out, __p)
#define vadcq(__a, __b, __carry) __arm_vadcq(__a, __b, __carry)
#define vadcq_m(__inactive, __a, __b, __carry, __p) __arm_vadcq_m(__inactive, __a, __b, __carry, __p)
#define vsbciq(__a, __b, __carry_out) __arm_vsbciq(__a, __b, __carry_out)
#define vsbciq_m(__inactive, __a, __b, __carry_out, __p) __arm_vsbciq_m(__inactive, __a, __b, __carry_out, __p)
#define vsbcq(__a, __b, __carry) __arm_vsbcq(__a, __b, __carry)
#define vsbcq_m(__inactive, __a, __b, __carry, __p) __arm_vsbcq_m(__inactive, __a, __b, __carry, __p)
#define vst1q_p(__addr, __value, __p) __arm_vst1q_p(__addr, __value, __p)
#define vst2q(__addr, __value) __arm_vst2q(__addr, __value)
#define vld1q_z(__base, __p) __arm_vld1q_z(__base, __p)
#define vld2q(__addr) __arm_vld2q(__addr)
#define vld4q(__addr) __arm_vld4q(__addr)
#define vsetq_lane(__a, __b, __idx) __arm_vsetq_lane(__a, __b, __idx)
#define vgetq_lane(__a, __idx) __arm_vgetq_lane(__a, __idx)
#define vshlcq_m(__a, __b, __imm, __p) __arm_vshlcq_m(__a, __b, __imm, __p)
/* Polymorphic floating-point intrinsics: vector rounding (vrnd*),
   half<->single conversions (vcvttq/vcvtbq operate on the top/bottom
   halves), fixed-point conversion with an immediate (__imm6), NaN-aware
   min/max ("nm" families, including across-vector "v" and absolute "a"
   forms), and complex multiply with 0/90/180/270-degree rotation.  */
#define vrndxq(__a) __arm_vrndxq(__a)
#define vrndq(__a) __arm_vrndq(__a)
#define vrndpq(__a) __arm_vrndpq(__a)
#define vrndnq(__a) __arm_vrndnq(__a)
#define vrndmq(__a) __arm_vrndmq(__a)
#define vrndaq(__a) __arm_vrndaq(__a)
#define vcvttq_f32(__a) __arm_vcvttq_f32(__a)
#define vcvtbq_f32(__a) __arm_vcvtbq_f32(__a)
#define vcvtq(__a) __arm_vcvtq(__a)
#define vcvtq_n(__a, __imm6) __arm_vcvtq_n(__a, __imm6)
#define vminnmvq(__a, __b) __arm_vminnmvq(__a, __b)
#define vminnmq(__a, __b) __arm_vminnmq(__a, __b)
#define vminnmavq(__a, __b) __arm_vminnmavq(__a, __b)
#define vminnmaq(__a, __b) __arm_vminnmaq(__a, __b)
#define vmaxnmvq(__a, __b) __arm_vmaxnmvq(__a, __b)
#define vmaxnmq(__a, __b) __arm_vmaxnmq(__a, __b)
#define vmaxnmavq(__a, __b) __arm_vmaxnmavq(__a, __b)
#define vmaxnmaq(__a, __b) __arm_vmaxnmaq(__a, __b)
#define vcmulq_rot90(__a, __b) __arm_vcmulq_rot90(__a, __b)
#define vcmulq_rot270(__a, __b) __arm_vcmulq_rot270(__a, __b)
#define vcmulq_rot180(__a, __b) __arm_vcmulq_rot180(__a, __b)
#define vcmulq(__a, __b) __arm_vcmulq(__a, __b)
/* Predicated ("_m" merging / "_p" predicated-reduction) polymorphic
   floating-point forms: conversions with the ACLE rounding suffixes
   (a = away, n = to nearest even, p = toward +inf, m = toward -inf),
   complex multiply-accumulate (vcmla*), fused multiply ops (vfma*),
   rounding, and NaN-aware min/max.  False-predicated lanes come from
   __inactive (or the first operand for the accumulate forms).  */
#define vcvtaq_m(__inactive, __a, __p) __arm_vcvtaq_m(__inactive, __a, __p)
#define vcvtq_m(__inactive, __a, __p) __arm_vcvtq_m(__inactive, __a, __p)
#define vcvtbq_m(__a, __b, __p) __arm_vcvtbq_m(__a, __b, __p)
#define vcvttq_m(__a, __b, __p) __arm_vcvttq_m(__a, __b, __p)
#define vcmlaq(__a, __b, __c) __arm_vcmlaq(__a, __b, __c)
#define vcmlaq_rot180(__a, __b, __c) __arm_vcmlaq_rot180(__a, __b, __c)
#define vcmlaq_rot270(__a, __b, __c) __arm_vcmlaq_rot270(__a, __b, __c)
#define vcmlaq_rot90(__a, __b, __c) __arm_vcmlaq_rot90(__a, __b, __c)
#define vfmaq(__a, __b, __c) __arm_vfmaq(__a, __b, __c)
#define vfmasq(__a, __b, __c) __arm_vfmasq(__a, __b, __c)
#define vfmsq(__a, __b, __c) __arm_vfmsq(__a, __b, __c)
#define vcvtmq_m(__inactive, __a, __p) __arm_vcvtmq_m(__inactive, __a, __p)
#define vcvtnq_m(__inactive, __a, __p) __arm_vcvtnq_m(__inactive, __a, __p)
#define vcvtpq_m(__inactive, __a, __p) __arm_vcvtpq_m(__inactive, __a, __p)
#define vmaxnmaq_m(__a, __b, __p) __arm_vmaxnmaq_m(__a, __b, __p)
#define vmaxnmavq_p(__a, __b, __p) __arm_vmaxnmavq_p(__a, __b, __p)
#define vmaxnmvq_p(__a, __b, __p) __arm_vmaxnmvq_p(__a, __b, __p)
#define vminnmaq_m(__a, __b, __p) __arm_vminnmaq_m(__a, __b, __p)
#define vminnmavq_p(__a, __b, __p) __arm_vminnmavq_p(__a, __b, __p)
#define vminnmvq_p(__a, __b, __p) __arm_vminnmvq_p(__a, __b, __p)
#define vrndaq_m(__inactive, __a, __p) __arm_vrndaq_m(__inactive, __a, __p)
#define vrndmq_m(__inactive, __a, __p) __arm_vrndmq_m(__inactive, __a, __p)
#define vrndnq_m(__inactive, __a, __p) __arm_vrndnq_m(__inactive, __a, __p)
#define vrndpq_m(__inactive, __a, __p) __arm_vrndpq_m(__inactive, __a, __p)
#define vrndq_m(__inactive, __a, __p) __arm_vrndq_m(__inactive, __a, __p)
#define vrndxq_m(__inactive, __a, __p) __arm_vrndxq_m(__inactive, __a, __p)
#define vcvtq_m_n(__inactive, __a, __imm6, __p) __arm_vcvtq_m_n(__inactive, __a, __imm6, __p)
#define vcmlaq_m(__a, __b, __c, __p) __arm_vcmlaq_m(__a, __b, __c, __p)
#define vcmlaq_rot180_m(__a, __b, __c, __p) __arm_vcmlaq_rot180_m(__a, __b, __c, __p)
#define vcmlaq_rot270_m(__a, __b, __c, __p) __arm_vcmlaq_rot270_m(__a, __b, __c, __p)
#define vcmlaq_rot90_m(__a, __b, __c, __p) __arm_vcmlaq_rot90_m(__a, __b, __c, __p)
#define vcmulq_m(__inactive, __a, __b, __p) __arm_vcmulq_m(__inactive, __a, __b, __p)
#define vcmulq_rot180_m(__inactive, __a, __b, __p) __arm_vcmulq_rot180_m(__inactive, __a, __b, __p)
#define vcmulq_rot270_m(__inactive, __a, __b, __p) __arm_vcmulq_rot270_m(__inactive, __a, __b, __p)
#define vcmulq_rot90_m(__inactive, __a, __b, __p) __arm_vcmulq_rot90_m(__inactive, __a, __b, __p)
#define vfmaq_m(__a, __b, __c, __p) __arm_vfmaq_m(__a, __b, __c, __p)
#define vfmasq_m(__a, __b, __c, __p) __arm_vfmasq_m(__a, __b, __c, __p)
#define vfmsq_m(__a, __b, __c, __p) __arm_vfmsq_m(__a, __b, __c, __p)
#define vmaxnmq_m(__inactive, __a, __b, __p) __arm_vmaxnmq_m(__inactive, __a, __b, __p)
#define vminnmq_m(__inactive, __a, __b, __p) __arm_vminnmq_m(__inactive, __a, __b, __p)
/* Bit-cast to float vector types, plus the "_x" (dont-care predicated)
   variants of the polymorphic floating-point operations above.  */
#define vreinterpretq_f16(__a) __arm_vreinterpretq_f16(__a)
#define vreinterpretq_f32(__a) __arm_vreinterpretq_f32(__a)
#define vminnmq_x(__a, __b, __p) __arm_vminnmq_x(__a, __b, __p)
#define vmaxnmq_x(__a, __b, __p) __arm_vmaxnmq_x(__a, __b, __p)
#define vcmulq_x(__a, __b, __p) __arm_vcmulq_x(__a, __b, __p)
#define vcmulq_rot90_x(__a, __b, __p) __arm_vcmulq_rot90_x(__a, __b, __p)
#define vcmulq_rot180_x(__a, __b, __p) __arm_vcmulq_rot180_x(__a, __b, __p)
#define vcmulq_rot270_x(__a, __b, __p) __arm_vcmulq_rot270_x(__a, __b, __p)
#define vcvtq_x(__a, __p) __arm_vcvtq_x(__a, __p)
#define vcvtq_x_n(__a, __imm6, __p) __arm_vcvtq_x_n(__a, __imm6, __p)
#define vrndq_x(__a, __p) __arm_vrndq_x(__a, __p)
#define vrndnq_x(__a, __p) __arm_vrndnq_x(__a, __p)
#define vrndmq_x(__a, __p) __arm_vrndmq_x(__a, __p)
#define vrndpq_x(__a, __p) __arm_vrndpq_x(__a, __p)
#define vrndaq_x(__a, __p) __arm_vrndaq_x(__a, __p)
#define vrndxq_x(__a, __p) __arm_vrndxq_x(__a, __p)
/* Explicitly-typed (non-overloaded) intrinsic names.  Each maps a
   type-suffixed user name onto its __arm_ implementation; the suffix
   (_s8/_u16/_f32/...) names the element type exactly as in the ACLE.
   First the vst4q stores and the float unary/conversion operations.  */
#define vst4q_s8( __addr, __value) __arm_vst4q_s8( __addr, __value)
#define vst4q_s16( __addr, __value) __arm_vst4q_s16( __addr, __value)
#define vst4q_s32( __addr, __value) __arm_vst4q_s32( __addr, __value)
#define vst4q_u8( __addr, __value) __arm_vst4q_u8( __addr, __value)
#define vst4q_u16( __addr, __value) __arm_vst4q_u16( __addr, __value)
#define vst4q_u32( __addr, __value) __arm_vst4q_u32( __addr, __value)
#define vst4q_f16( __addr, __value) __arm_vst4q_f16( __addr, __value)
#define vst4q_f32( __addr, __value) __arm_vst4q_f32( __addr, __value)
#define vrndxq_f16(__a) __arm_vrndxq_f16(__a)
#define vrndxq_f32(__a) __arm_vrndxq_f32(__a)
#define vrndq_f16(__a) __arm_vrndq_f16(__a)
#define vrndq_f32(__a) __arm_vrndq_f32(__a)
#define vrndpq_f16(__a) __arm_vrndpq_f16(__a)
#define vrndpq_f32(__a) __arm_vrndpq_f32(__a)
#define vrndnq_f16(__a) __arm_vrndnq_f16(__a)
#define vrndnq_f32(__a) __arm_vrndnq_f32(__a)
#define vrndmq_f16(__a) __arm_vrndmq_f16(__a)
#define vrndmq_f32(__a) __arm_vrndmq_f32(__a)
#define vrndaq_f16(__a) __arm_vrndaq_f16(__a)
#define vrndaq_f32(__a) __arm_vrndaq_f32(__a)
#define vrev64q_f16(__a) __arm_vrev64q_f16(__a)
#define vrev64q_f32(__a) __arm_vrev64q_f32(__a)
#define vnegq_f16(__a) __arm_vnegq_f16(__a)
#define vnegq_f32(__a) __arm_vnegq_f32(__a)
#define vdupq_n_f16(__a) __arm_vdupq_n_f16(__a)
#define vdupq_n_f32(__a) __arm_vdupq_n_f32(__a)
#define vabsq_f16(__a) __arm_vabsq_f16(__a)
#define vabsq_f32(__a) __arm_vabsq_f32(__a)
#define vrev32q_f16(__a) __arm_vrev32q_f16(__a)
#define vcvttq_f32_f16(__a) __arm_vcvttq_f32_f16(__a)
#define vcvtbq_f32_f16(__a) __arm_vcvtbq_f32_f16(__a)
#define vcvtq_f16_s16(__a) __arm_vcvtq_f16_s16(__a)
#define vcvtq_f32_s32(__a) __arm_vcvtq_f32_s32(__a)
#define vcvtq_f16_u16(__a) __arm_vcvtq_f16_u16(__a)
#define vcvtq_f32_u32(__a) __arm_vcvtq_f32_u32(__a)
/* Explicitly-typed signed-integer unary operations (duplicate, absolute,
   count-leading-sign/zero, negate, across-vector add, widening moves,
   bitwise NOT, element reversal, saturating abs/negate) and the
   float-to-signed conversions with each ACLE rounding suffix.  */
#define vdupq_n_s8(__a) __arm_vdupq_n_s8(__a)
#define vdupq_n_s16(__a) __arm_vdupq_n_s16(__a)
#define vdupq_n_s32(__a) __arm_vdupq_n_s32(__a)
#define vabsq_s8(__a) __arm_vabsq_s8(__a)
#define vabsq_s16(__a) __arm_vabsq_s16(__a)
#define vabsq_s32(__a) __arm_vabsq_s32(__a)
#define vclsq_s8(__a) __arm_vclsq_s8(__a)
#define vclsq_s16(__a) __arm_vclsq_s16(__a)
#define vclsq_s32(__a) __arm_vclsq_s32(__a)
#define vclzq_s8(__a) __arm_vclzq_s8(__a)
#define vclzq_s16(__a) __arm_vclzq_s16(__a)
#define vclzq_s32(__a) __arm_vclzq_s32(__a)
#define vnegq_s8(__a) __arm_vnegq_s8(__a)
#define vnegq_s16(__a) __arm_vnegq_s16(__a)
#define vnegq_s32(__a) __arm_vnegq_s32(__a)
#define vaddlvq_s32(__a) __arm_vaddlvq_s32(__a)
#define vaddvq_s8(__a) __arm_vaddvq_s8(__a)
#define vaddvq_s16(__a) __arm_vaddvq_s16(__a)
#define vaddvq_s32(__a) __arm_vaddvq_s32(__a)
#define vmovlbq_s8(__a) __arm_vmovlbq_s8(__a)
#define vmovlbq_s16(__a) __arm_vmovlbq_s16(__a)
#define vmovltq_s8(__a) __arm_vmovltq_s8(__a)
#define vmovltq_s16(__a) __arm_vmovltq_s16(__a)
#define vmvnq_s8(__a) __arm_vmvnq_s8(__a)
#define vmvnq_s16(__a) __arm_vmvnq_s16(__a)
#define vmvnq_s32(__a) __arm_vmvnq_s32(__a)
#define vmvnq_n_s16( __imm) __arm_vmvnq_n_s16( __imm)
#define vmvnq_n_s32( __imm) __arm_vmvnq_n_s32( __imm)
#define vrev16q_s8(__a) __arm_vrev16q_s8(__a)
#define vrev32q_s8(__a) __arm_vrev32q_s8(__a)
#define vrev32q_s16(__a) __arm_vrev32q_s16(__a)
#define vrev64q_s8(__a) __arm_vrev64q_s8(__a)
#define vrev64q_s16(__a) __arm_vrev64q_s16(__a)
#define vrev64q_s32(__a) __arm_vrev64q_s32(__a)
#define vqabsq_s8(__a) __arm_vqabsq_s8(__a)
#define vqabsq_s16(__a) __arm_vqabsq_s16(__a)
#define vqabsq_s32(__a) __arm_vqabsq_s32(__a)
#define vqnegq_s8(__a) __arm_vqnegq_s8(__a)
#define vqnegq_s16(__a) __arm_vqnegq_s16(__a)
#define vqnegq_s32(__a) __arm_vqnegq_s32(__a)
#define vcvtaq_s16_f16(__a) __arm_vcvtaq_s16_f16(__a)
#define vcvtaq_s32_f32(__a) __arm_vcvtaq_s32_f32(__a)
#define vcvtnq_s16_f16(__a) __arm_vcvtnq_s16_f16(__a)
#define vcvtnq_s32_f32(__a) __arm_vcvtnq_s32_f32(__a)
#define vcvtpq_s16_f16(__a) __arm_vcvtpq_s16_f16(__a)
#define vcvtpq_s32_f32(__a) __arm_vcvtpq_s32_f32(__a)
#define vcvtmq_s16_f16(__a) __arm_vcvtmq_s16_f16(__a)
#define vcvtmq_s32_f32(__a) __arm_vcvtmq_s32_f32(__a)
#define vcvtq_s16_f16(__a) __arm_vcvtq_s16_f16(__a)
#define vcvtq_s32_f32(__a) __arm_vcvtq_s32_f32(__a)
/* Explicitly-typed unsigned-integer unary operations and the
   float-to-unsigned conversions with each ACLE rounding suffix.  */
#define vrev64q_u8(__a) __arm_vrev64q_u8(__a)
#define vrev64q_u16(__a) __arm_vrev64q_u16(__a)
#define vrev64q_u32(__a) __arm_vrev64q_u32(__a)
#define vmvnq_u8(__a) __arm_vmvnq_u8(__a)
#define vmvnq_u16(__a) __arm_vmvnq_u16(__a)
#define vmvnq_u32(__a) __arm_vmvnq_u32(__a)
#define vdupq_n_u8(__a) __arm_vdupq_n_u8(__a)
#define vdupq_n_u16(__a) __arm_vdupq_n_u16(__a)
#define vdupq_n_u32(__a) __arm_vdupq_n_u32(__a)
#define vclzq_u8(__a) __arm_vclzq_u8(__a)
#define vclzq_u16(__a) __arm_vclzq_u16(__a)
#define vclzq_u32(__a) __arm_vclzq_u32(__a)
#define vaddvq_u8(__a) __arm_vaddvq_u8(__a)
#define vaddvq_u16(__a) __arm_vaddvq_u16(__a)
#define vaddvq_u32(__a) __arm_vaddvq_u32(__a)
#define vrev32q_u8(__a) __arm_vrev32q_u8(__a)
#define vrev32q_u16(__a) __arm_vrev32q_u16(__a)
#define vmovltq_u8(__a) __arm_vmovltq_u8(__a)
#define vmovltq_u16(__a) __arm_vmovltq_u16(__a)
#define vmovlbq_u8(__a) __arm_vmovlbq_u8(__a)
#define vmovlbq_u16(__a) __arm_vmovlbq_u16(__a)
#define vmvnq_n_u16( __imm) __arm_vmvnq_n_u16( __imm)
#define vmvnq_n_u32( __imm) __arm_vmvnq_n_u32( __imm)
#define vrev16q_u8(__a) __arm_vrev16q_u8(__a)
#define vaddlvq_u32(__a) __arm_vaddlvq_u32(__a)
#define vcvtq_u16_f16(__a) __arm_vcvtq_u16_f16(__a)
#define vcvtq_u32_f32(__a) __arm_vcvtq_u32_f32(__a)
#define vcvtpq_u16_f16(__a) __arm_vcvtpq_u16_f16(__a)
#define vcvtpq_u32_f32(__a) __arm_vcvtpq_u32_f32(__a)
#define vcvtnq_u16_f16(__a) __arm_vcvtnq_u16_f16(__a)
#define vcvtnq_u32_f32(__a) __arm_vcvtnq_u32_f32(__a)
#define vcvtmq_u16_f16(__a) __arm_vcvtmq_u16_f16(__a)
#define vcvtmq_u32_f32(__a) __arm_vcvtmq_u32_f32(__a)
#define vcvtaq_u16_f16(__a) __arm_vcvtaq_u16_f16(__a)
#define vcvtaq_u32_f32(__a) __arm_vcvtaq_u32_f32(__a)
/* Predicate creation: vctp* builds a tail predicate from a scalar
   element count; vpnot inverts a predicate value.  */
#define vctp16q(__a) __arm_vctp16q(__a)
#define vctp32q(__a) __arm_vctp32q(__a)
#define vctp64q(__a) __arm_vctp64q(__a)
#define vctp8q(__a) __arm_vctp8q(__a)
#define vpnot(__a) __arm_vpnot(__a)
/* Vector-scalar ("_n") float ops, fixed-point conversions with an
   immediate fraction-bit count (__imm6), vcreateq (build a 128-bit
   vector from two 64-bit scalar halves), and immediate right shifts.  */
#define vsubq_n_f16(__a, __b) __arm_vsubq_n_f16(__a, __b)
#define vsubq_n_f32(__a, __b) __arm_vsubq_n_f32(__a, __b)
#define vbrsrq_n_f16(__a, __b) __arm_vbrsrq_n_f16(__a, __b)
#define vbrsrq_n_f32(__a, __b) __arm_vbrsrq_n_f32(__a, __b)
#define vcvtq_n_f16_s16(__a, __imm6) __arm_vcvtq_n_f16_s16(__a, __imm6)
#define vcvtq_n_f32_s32(__a, __imm6) __arm_vcvtq_n_f32_s32(__a, __imm6)
#define vcvtq_n_f16_u16(__a, __imm6) __arm_vcvtq_n_f16_u16(__a, __imm6)
#define vcvtq_n_f32_u32(__a, __imm6) __arm_vcvtq_n_f32_u32(__a, __imm6)
#define vcreateq_f16(__a, __b) __arm_vcreateq_f16(__a, __b)
#define vcreateq_f32(__a, __b) __arm_vcreateq_f32(__a, __b)
#define vcvtq_n_s16_f16(__a, __imm6) __arm_vcvtq_n_s16_f16(__a, __imm6)
#define vcvtq_n_s32_f32(__a, __imm6) __arm_vcvtq_n_s32_f32(__a, __imm6)
#define vcvtq_n_u16_f16(__a, __imm6) __arm_vcvtq_n_u16_f16(__a, __imm6)
#define vcvtq_n_u32_f32(__a, __imm6) __arm_vcvtq_n_u32_f32(__a, __imm6)
#define vcreateq_u8(__a, __b) __arm_vcreateq_u8(__a, __b)
#define vcreateq_u16(__a, __b) __arm_vcreateq_u16(__a, __b)
#define vcreateq_u32(__a, __b) __arm_vcreateq_u32(__a, __b)
#define vcreateq_u64(__a, __b) __arm_vcreateq_u64(__a, __b)
#define vcreateq_s8(__a, __b) __arm_vcreateq_s8(__a, __b)
#define vcreateq_s16(__a, __b) __arm_vcreateq_s16(__a, __b)
#define vcreateq_s32(__a, __b) __arm_vcreateq_s32(__a, __b)
#define vcreateq_s64(__a, __b) __arm_vcreateq_s64(__a, __b)
#define vshrq_n_s8(__a, __imm) __arm_vshrq_n_s8(__a, __imm)
#define vshrq_n_s16(__a, __imm) __arm_vshrq_n_s16(__a, __imm)
#define vshrq_n_s32(__a, __imm) __arm_vshrq_n_s32(__a, __imm)
#define vshrq_n_u8(__a, __imm) __arm_vshrq_n_u8(__a, __imm)
#define vshrq_n_u16(__a, __imm) __arm_vshrq_n_u16(__a, __imm)
#define vshrq_n_u32(__a, __imm) __arm_vshrq_n_u32(__a, __imm)
/* Explicitly-typed binary operations: predicated long reductions,
   compares, register shifts, then the uint8x16_t arithmetic group
   (naming follows ACLE: q = saturating, h = halving, r = rounding,
   "_n" = vector-by-scalar, "v" = across-vector reduction).  */
#define vaddlvq_p_s32(__a, __p) __arm_vaddlvq_p_s32(__a, __p)
#define vaddlvq_p_u32(__a, __p) __arm_vaddlvq_p_u32(__a, __p)
#define vcmpneq_s8(__a, __b) __arm_vcmpneq_s8(__a, __b)
#define vcmpneq_s16(__a, __b) __arm_vcmpneq_s16(__a, __b)
#define vcmpneq_s32(__a, __b) __arm_vcmpneq_s32(__a, __b)
#define vcmpneq_u8(__a, __b) __arm_vcmpneq_u8(__a, __b)
#define vcmpneq_u16(__a, __b) __arm_vcmpneq_u16(__a, __b)
#define vcmpneq_u32(__a, __b) __arm_vcmpneq_u32(__a, __b)
#define vshlq_s8(__a, __b) __arm_vshlq_s8(__a, __b)
#define vshlq_s16(__a, __b) __arm_vshlq_s16(__a, __b)
#define vshlq_s32(__a, __b) __arm_vshlq_s32(__a, __b)
#define vshlq_u8(__a, __b) __arm_vshlq_u8(__a, __b)
#define vshlq_u16(__a, __b) __arm_vshlq_u16(__a, __b)
#define vshlq_u32(__a, __b) __arm_vshlq_u32(__a, __b)
#define vsubq_u8(__a, __b) __arm_vsubq_u8(__a, __b)
#define vsubq_n_u8(__a, __b) __arm_vsubq_n_u8(__a, __b)
#define vrmulhq_u8(__a, __b) __arm_vrmulhq_u8(__a, __b)
#define vrhaddq_u8(__a, __b) __arm_vrhaddq_u8(__a, __b)
#define vqsubq_u8(__a, __b) __arm_vqsubq_u8(__a, __b)
#define vqsubq_n_u8(__a, __b) __arm_vqsubq_n_u8(__a, __b)
#define vqaddq_u8(__a, __b) __arm_vqaddq_u8(__a, __b)
#define vqaddq_n_u8(__a, __b) __arm_vqaddq_n_u8(__a, __b)
#define vorrq_u8(__a, __b) __arm_vorrq_u8(__a, __b)
#define vornq_u8(__a, __b) __arm_vornq_u8(__a, __b)
#define vmulq_u8(__a, __b) __arm_vmulq_u8(__a, __b)
#define vmulq_n_u8(__a, __b) __arm_vmulq_n_u8(__a, __b)
#define vmulltq_int_u8(__a, __b) __arm_vmulltq_int_u8(__a, __b)
#define vmullbq_int_u8(__a, __b) __arm_vmullbq_int_u8(__a, __b)
#define vmulhq_u8(__a, __b) __arm_vmulhq_u8(__a, __b)
#define vmladavq_u8(__a, __b) __arm_vmladavq_u8(__a, __b)
#define vminvq_u8(__a, __b) __arm_vminvq_u8(__a, __b)
#define vminq_u8(__a, __b) __arm_vminq_u8(__a, __b)
#define vmaxvq_u8(__a, __b) __arm_vmaxvq_u8(__a, __b)
#define vmaxq_u8(__a, __b) __arm_vmaxq_u8(__a, __b)
#define vhsubq_u8(__a, __b) __arm_vhsubq_u8(__a, __b)
#define vhsubq_n_u8(__a, __b) __arm_vhsubq_n_u8(__a, __b)
#define vhaddq_u8(__a, __b) __arm_vhaddq_u8(__a, __b)
#define vhaddq_n_u8(__a, __b) __arm_vhaddq_n_u8(__a, __b)
#define veorq_u8(__a, __b) __arm_veorq_u8(__a, __b)
#define vcmpneq_n_u8(__a, __b) __arm_vcmpneq_n_u8(__a, __b)
#define vcmphiq_u8(__a, __b) __arm_vcmphiq_u8(__a, __b)
#define vcmphiq_n_u8(__a, __b) __arm_vcmphiq_n_u8(__a, __b)
#define vcmpeqq_u8(__a, __b) __arm_vcmpeqq_u8(__a, __b)
#define vcmpeqq_n_u8(__a, __b) __arm_vcmpeqq_n_u8(__a, __b)
#define vcmpcsq_u8(__a, __b) __arm_vcmpcsq_u8(__a, __b)
#define vcmpcsq_n_u8(__a, __b) __arm_vcmpcsq_n_u8(__a, __b)
#define vcaddq_rot90_u8(__a, __b) __arm_vcaddq_rot90_u8(__a, __b)
#define vcaddq_rot270_u8(__a, __b) __arm_vcaddq_rot270_u8(__a, __b)
#define vbicq_u8(__a, __b) __arm_vbicq_u8(__a, __b)
#define vandq_u8(__a, __b) __arm_vandq_u8(__a, __b)
#define vaddvq_p_u8(__a, __p) __arm_vaddvq_p_u8(__a, __p)
#define vaddvaq_u8(__a, __b) __arm_vaddvaq_u8(__a, __b)
#define vaddq_n_u8(__a, __b) __arm_vaddq_n_u8(__a, __b)
#define vabdq_u8(__a, __b) __arm_vabdq_u8(__a, __b)
#define vshlq_r_u8(__a, __b) __arm_vshlq_r_u8(__a, __b)
#define vrshlq_u8(__a, __b) __arm_vrshlq_u8(__a, __b)
#define vrshlq_n_u8(__a, __b) __arm_vrshlq_n_u8(__a, __b)
#define vqshlq_u8(__a, __b) __arm_vqshlq_u8(__a, __b)
#define vqshlq_r_u8(__a, __b) __arm_vqshlq_r_u8(__a, __b)
#define vqrshlq_u8(__a, __b) __arm_vqrshlq_u8(__a, __b)
#define vqrshlq_n_u8(__a, __b) __arm_vqrshlq_n_u8(__a, __b)
#define vminavq_s8(__a, __b) __arm_vminavq_s8(__a, __b)
#define vminaq_s8(__a, __b) __arm_vminaq_s8(__a, __b)
#define vmaxavq_s8(__a, __b) __arm_vmaxavq_s8(__a, __b)
#define vmaxaq_s8(__a, __b) __arm_vmaxaq_s8(__a, __b)
#define vbrsrq_n_u8(__a, __b) __arm_vbrsrq_n_u8(__a, __b)
#define vshlq_n_u8(__a, __imm) __arm_vshlq_n_u8(__a, __imm)
#define vrshrq_n_u8(__a, __imm) __arm_vrshrq_n_u8(__a, __imm)
#define vqshlq_n_u8(__a, __imm) __arm_vqshlq_n_u8(__a, __imm)
/* int8x16_t compare and arithmetic group (same ACLE naming scheme as
   the u8 group above; "qrd"/"qd" = saturating (rounding) doubling
   multiply returning high half, "hcadd" = halving complex add).  */
#define vcmpneq_n_s8(__a, __b) __arm_vcmpneq_n_s8(__a, __b)
#define vcmpltq_s8(__a, __b) __arm_vcmpltq_s8(__a, __b)
#define vcmpltq_n_s8(__a, __b) __arm_vcmpltq_n_s8(__a, __b)
#define vcmpleq_s8(__a, __b) __arm_vcmpleq_s8(__a, __b)
#define vcmpleq_n_s8(__a, __b) __arm_vcmpleq_n_s8(__a, __b)
#define vcmpgtq_s8(__a, __b) __arm_vcmpgtq_s8(__a, __b)
#define vcmpgtq_n_s8(__a, __b) __arm_vcmpgtq_n_s8(__a, __b)
#define vcmpgeq_s8(__a, __b) __arm_vcmpgeq_s8(__a, __b)
#define vcmpgeq_n_s8(__a, __b) __arm_vcmpgeq_n_s8(__a, __b)
#define vcmpeqq_s8(__a, __b) __arm_vcmpeqq_s8(__a, __b)
#define vcmpeqq_n_s8(__a, __b) __arm_vcmpeqq_n_s8(__a, __b)
#define vqshluq_n_s8(__a, __imm) __arm_vqshluq_n_s8(__a, __imm)
#define vaddvq_p_s8(__a, __p) __arm_vaddvq_p_s8(__a, __p)
#define vsubq_s8(__a, __b) __arm_vsubq_s8(__a, __b)
#define vsubq_n_s8(__a, __b) __arm_vsubq_n_s8(__a, __b)
#define vshlq_r_s8(__a, __b) __arm_vshlq_r_s8(__a, __b)
#define vrshlq_s8(__a, __b) __arm_vrshlq_s8(__a, __b)
#define vrshlq_n_s8(__a, __b) __arm_vrshlq_n_s8(__a, __b)
#define vrmulhq_s8(__a, __b) __arm_vrmulhq_s8(__a, __b)
#define vrhaddq_s8(__a, __b) __arm_vrhaddq_s8(__a, __b)
#define vqsubq_s8(__a, __b) __arm_vqsubq_s8(__a, __b)
#define vqsubq_n_s8(__a, __b) __arm_vqsubq_n_s8(__a, __b)
#define vqshlq_s8(__a, __b) __arm_vqshlq_s8(__a, __b)
#define vqshlq_r_s8(__a, __b) __arm_vqshlq_r_s8(__a, __b)
#define vqrshlq_s8(__a, __b) __arm_vqrshlq_s8(__a, __b)
#define vqrshlq_n_s8(__a, __b) __arm_vqrshlq_n_s8(__a, __b)
#define vqrdmulhq_s8(__a, __b) __arm_vqrdmulhq_s8(__a, __b)
#define vqrdmulhq_n_s8(__a, __b) __arm_vqrdmulhq_n_s8(__a, __b)
#define vqdmulhq_s8(__a, __b) __arm_vqdmulhq_s8(__a, __b)
#define vqdmulhq_n_s8(__a, __b) __arm_vqdmulhq_n_s8(__a, __b)
#define vqaddq_s8(__a, __b) __arm_vqaddq_s8(__a, __b)
#define vqaddq_n_s8(__a, __b) __arm_vqaddq_n_s8(__a, __b)
#define vorrq_s8(__a, __b) __arm_vorrq_s8(__a, __b)
#define vornq_s8(__a, __b) __arm_vornq_s8(__a, __b)
#define vmulq_s8(__a, __b) __arm_vmulq_s8(__a, __b)
#define vmulq_n_s8(__a, __b) __arm_vmulq_n_s8(__a, __b)
#define vmulltq_int_s8(__a, __b) __arm_vmulltq_int_s8(__a, __b)
#define vmullbq_int_s8(__a, __b) __arm_vmullbq_int_s8(__a, __b)
#define vmulhq_s8(__a, __b) __arm_vmulhq_s8(__a, __b)
#define vmlsdavxq_s8(__a, __b) __arm_vmlsdavxq_s8(__a, __b)
#define vmlsdavq_s8(__a, __b) __arm_vmlsdavq_s8(__a, __b)
#define vmladavxq_s8(__a, __b) __arm_vmladavxq_s8(__a, __b)
#define vmladavq_s8(__a, __b) __arm_vmladavq_s8(__a, __b)
#define vminvq_s8(__a, __b) __arm_vminvq_s8(__a, __b)
#define vminq_s8(__a, __b) __arm_vminq_s8(__a, __b)
#define vmaxvq_s8(__a, __b) __arm_vmaxvq_s8(__a, __b)
#define vmaxq_s8(__a, __b) __arm_vmaxq_s8(__a, __b)
#define vhsubq_s8(__a, __b) __arm_vhsubq_s8(__a, __b)
#define vhsubq_n_s8(__a, __b) __arm_vhsubq_n_s8(__a, __b)
#define vhcaddq_rot90_s8(__a, __b) __arm_vhcaddq_rot90_s8(__a, __b)
#define vhcaddq_rot270_s8(__a, __b) __arm_vhcaddq_rot270_s8(__a, __b)
#define vhaddq_s8(__a, __b) __arm_vhaddq_s8(__a, __b)
#define vhaddq_n_s8(__a, __b) __arm_vhaddq_n_s8(__a, __b)
#define veorq_s8(__a, __b) __arm_veorq_s8(__a, __b)
#define vcaddq_rot90_s8(__a, __b) __arm_vcaddq_rot90_s8(__a, __b)
#define vcaddq_rot270_s8(__a, __b) __arm_vcaddq_rot270_s8(__a, __b)
#define vbrsrq_n_s8(__a, __b) __arm_vbrsrq_n_s8(__a, __b)
#define vbicq_s8(__a, __b) __arm_vbicq_s8(__a, __b)
#define vandq_s8(__a, __b) __arm_vandq_s8(__a, __b)
#define vaddvaq_s8(__a, __b) __arm_vaddvaq_s8(__a, __b)
#define vaddq_n_s8(__a, __b) __arm_vaddq_n_s8(__a, __b)
#define vabdq_s8(__a, __b) __arm_vabdq_s8(__a, __b)
#define vshlq_n_s8(__a, __imm) __arm_vshlq_n_s8(__a, __imm)
#define vrshrq_n_s8(__a, __imm) __arm_vrshrq_n_s8(__a, __imm)
#define vqshlq_n_s8(__a, __imm) __arm_vqshlq_n_s8(__a, __imm)
/* uint16x8_t arithmetic/compare group; mirrors the u8 group element
   for element, plus the s16 absolute min/max-across reductions.  */
#define vsubq_u16(__a, __b) __arm_vsubq_u16(__a, __b)
#define vsubq_n_u16(__a, __b) __arm_vsubq_n_u16(__a, __b)
#define vrmulhq_u16(__a, __b) __arm_vrmulhq_u16(__a, __b)
#define vrhaddq_u16(__a, __b) __arm_vrhaddq_u16(__a, __b)
#define vqsubq_u16(__a, __b) __arm_vqsubq_u16(__a, __b)
#define vqsubq_n_u16(__a, __b) __arm_vqsubq_n_u16(__a, __b)
#define vqaddq_u16(__a, __b) __arm_vqaddq_u16(__a, __b)
#define vqaddq_n_u16(__a, __b) __arm_vqaddq_n_u16(__a, __b)
#define vorrq_u16(__a, __b) __arm_vorrq_u16(__a, __b)
#define vornq_u16(__a, __b) __arm_vornq_u16(__a, __b)
#define vmulq_u16(__a, __b) __arm_vmulq_u16(__a, __b)
#define vmulq_n_u16(__a, __b) __arm_vmulq_n_u16(__a, __b)
#define vmulltq_int_u16(__a, __b) __arm_vmulltq_int_u16(__a, __b)
#define vmullbq_int_u16(__a, __b) __arm_vmullbq_int_u16(__a, __b)
#define vmulhq_u16(__a, __b) __arm_vmulhq_u16(__a, __b)
#define vmladavq_u16(__a, __b) __arm_vmladavq_u16(__a, __b)
#define vminvq_u16(__a, __b) __arm_vminvq_u16(__a, __b)
#define vminq_u16(__a, __b) __arm_vminq_u16(__a, __b)
#define vmaxvq_u16(__a, __b) __arm_vmaxvq_u16(__a, __b)
#define vmaxq_u16(__a, __b) __arm_vmaxq_u16(__a, __b)
#define vhsubq_u16(__a, __b) __arm_vhsubq_u16(__a, __b)
#define vhsubq_n_u16(__a, __b) __arm_vhsubq_n_u16(__a, __b)
#define vhaddq_u16(__a, __b) __arm_vhaddq_u16(__a, __b)
#define vhaddq_n_u16(__a, __b) __arm_vhaddq_n_u16(__a, __b)
#define veorq_u16(__a, __b) __arm_veorq_u16(__a, __b)
#define vcmpneq_n_u16(__a, __b) __arm_vcmpneq_n_u16(__a, __b)
#define vcmphiq_u16(__a, __b) __arm_vcmphiq_u16(__a, __b)
#define vcmphiq_n_u16(__a, __b) __arm_vcmphiq_n_u16(__a, __b)
#define vcmpeqq_u16(__a, __b) __arm_vcmpeqq_u16(__a, __b)
#define vcmpeqq_n_u16(__a, __b) __arm_vcmpeqq_n_u16(__a, __b)
#define vcmpcsq_u16(__a, __b) __arm_vcmpcsq_u16(__a, __b)
#define vcmpcsq_n_u16(__a, __b) __arm_vcmpcsq_n_u16(__a, __b)
#define vcaddq_rot90_u16(__a, __b) __arm_vcaddq_rot90_u16(__a, __b)
#define vcaddq_rot270_u16(__a, __b) __arm_vcaddq_rot270_u16(__a, __b)
#define vbicq_u16(__a, __b) __arm_vbicq_u16(__a, __b)
#define vandq_u16(__a, __b) __arm_vandq_u16(__a, __b)
#define vaddvq_p_u16(__a, __p) __arm_vaddvq_p_u16(__a, __p)
#define vaddvaq_u16(__a, __b) __arm_vaddvaq_u16(__a, __b)
#define vaddq_n_u16(__a, __b) __arm_vaddq_n_u16(__a, __b)
#define vabdq_u16(__a, __b) __arm_vabdq_u16(__a, __b)
#define vshlq_r_u16(__a, __b) __arm_vshlq_r_u16(__a, __b)
#define vrshlq_u16(__a, __b) __arm_vrshlq_u16(__a, __b)
#define vrshlq_n_u16(__a, __b) __arm_vrshlq_n_u16(__a, __b)
#define vqshlq_u16(__a, __b) __arm_vqshlq_u16(__a, __b)
#define vqshlq_r_u16(__a, __b) __arm_vqshlq_r_u16(__a, __b)
#define vqrshlq_u16(__a, __b) __arm_vqrshlq_u16(__a, __b)
#define vqrshlq_n_u16(__a, __b) __arm_vqrshlq_n_u16(__a, __b)
#define vminavq_s16(__a, __b) __arm_vminavq_s16(__a, __b)
#define vminaq_s16(__a, __b) __arm_vminaq_s16(__a, __b)
#define vmaxavq_s16(__a, __b) __arm_vmaxavq_s16(__a, __b)
#define vmaxaq_s16(__a, __b) __arm_vmaxaq_s16(__a, __b)
#define vbrsrq_n_u16(__a, __b) __arm_vbrsrq_n_u16(__a, __b)
#define vshlq_n_u16(__a, __imm) __arm_vshlq_n_u16(__a, __imm)
#define vrshrq_n_u16(__a, __imm) __arm_vrshrq_n_u16(__a, __imm)
#define vqshlq_n_u16(__a, __imm) __arm_vqshlq_n_u16(__a, __imm)
/* int16x8_t compare and arithmetic group; mirrors the s8 group
   element for element.  */
#define vcmpneq_n_s16(__a, __b) __arm_vcmpneq_n_s16(__a, __b)
#define vcmpltq_s16(__a, __b) __arm_vcmpltq_s16(__a, __b)
#define vcmpltq_n_s16(__a, __b) __arm_vcmpltq_n_s16(__a, __b)
#define vcmpleq_s16(__a, __b) __arm_vcmpleq_s16(__a, __b)
#define vcmpleq_n_s16(__a, __b) __arm_vcmpleq_n_s16(__a, __b)
#define vcmpgtq_s16(__a, __b) __arm_vcmpgtq_s16(__a, __b)
#define vcmpgtq_n_s16(__a, __b) __arm_vcmpgtq_n_s16(__a, __b)
#define vcmpgeq_s16(__a, __b) __arm_vcmpgeq_s16(__a, __b)
#define vcmpgeq_n_s16(__a, __b) __arm_vcmpgeq_n_s16(__a, __b)
#define vcmpeqq_s16(__a, __b) __arm_vcmpeqq_s16(__a, __b)
#define vcmpeqq_n_s16(__a, __b) __arm_vcmpeqq_n_s16(__a, __b)
#define vqshluq_n_s16(__a, __imm) __arm_vqshluq_n_s16(__a, __imm)
#define vaddvq_p_s16(__a, __p) __arm_vaddvq_p_s16(__a, __p)
#define vsubq_s16(__a, __b) __arm_vsubq_s16(__a, __b)
#define vsubq_n_s16(__a, __b) __arm_vsubq_n_s16(__a, __b)
#define vshlq_r_s16(__a, __b) __arm_vshlq_r_s16(__a, __b)
#define vrshlq_s16(__a, __b) __arm_vrshlq_s16(__a, __b)
#define vrshlq_n_s16(__a, __b) __arm_vrshlq_n_s16(__a, __b)
#define vrmulhq_s16(__a, __b) __arm_vrmulhq_s16(__a, __b)
#define vrhaddq_s16(__a, __b) __arm_vrhaddq_s16(__a, __b)
#define vqsubq_s16(__a, __b) __arm_vqsubq_s16(__a, __b)
#define vqsubq_n_s16(__a, __b) __arm_vqsubq_n_s16(__a, __b)
#define vqshlq_s16(__a, __b) __arm_vqshlq_s16(__a, __b)
#define vqshlq_r_s16(__a, __b) __arm_vqshlq_r_s16(__a, __b)
#define vqrshlq_s16(__a, __b) __arm_vqrshlq_s16(__a, __b)
#define vqrshlq_n_s16(__a, __b) __arm_vqrshlq_n_s16(__a, __b)
#define vqrdmulhq_s16(__a, __b) __arm_vqrdmulhq_s16(__a, __b)
#define vqrdmulhq_n_s16(__a, __b) __arm_vqrdmulhq_n_s16(__a, __b)
#define vqdmulhq_s16(__a, __b) __arm_vqdmulhq_s16(__a, __b)
#define vqdmulhq_n_s16(__a, __b) __arm_vqdmulhq_n_s16(__a, __b)
#define vqaddq_s16(__a, __b) __arm_vqaddq_s16(__a, __b)
#define vqaddq_n_s16(__a, __b) __arm_vqaddq_n_s16(__a, __b)
#define vorrq_s16(__a, __b) __arm_vorrq_s16(__a, __b)
#define vornq_s16(__a, __b) __arm_vornq_s16(__a, __b)
#define vmulq_s16(__a, __b) __arm_vmulq_s16(__a, __b)
#define vmulq_n_s16(__a, __b) __arm_vmulq_n_s16(__a, __b)
#define vmulltq_int_s16(__a, __b) __arm_vmulltq_int_s16(__a, __b)
#define vmullbq_int_s16(__a, __b) __arm_vmullbq_int_s16(__a, __b)
#define vmulhq_s16(__a, __b) __arm_vmulhq_s16(__a, __b)
#define vmlsdavxq_s16(__a, __b) __arm_vmlsdavxq_s16(__a, __b)
#define vmlsdavq_s16(__a, __b) __arm_vmlsdavq_s16(__a, __b)
#define vmladavxq_s16(__a, __b) __arm_vmladavxq_s16(__a, __b)
#define vmladavq_s16(__a, __b) __arm_vmladavq_s16(__a, __b)
#define vminvq_s16(__a, __b) __arm_vminvq_s16(__a, __b)
#define vminq_s16(__a, __b) __arm_vminq_s16(__a, __b)
#define vmaxvq_s16(__a, __b) __arm_vmaxvq_s16(__a, __b)
#define vmaxq_s16(__a, __b) __arm_vmaxq_s16(__a, __b)
#define vhsubq_s16(__a, __b) __arm_vhsubq_s16(__a, __b)
#define vhsubq_n_s16(__a, __b) __arm_vhsubq_n_s16(__a, __b)
#define vhcaddq_rot90_s16(__a, __b) __arm_vhcaddq_rot90_s16(__a, __b)
#define vhcaddq_rot270_s16(__a, __b) __arm_vhcaddq_rot270_s16(__a, __b)
#define vhaddq_s16(__a, __b) __arm_vhaddq_s16(__a, __b)
#define vhaddq_n_s16(__a, __b) __arm_vhaddq_n_s16(__a, __b)
#define veorq_s16(__a, __b) __arm_veorq_s16(__a, __b)
#define vcaddq_rot90_s16(__a, __b) __arm_vcaddq_rot90_s16(__a, __b)
#define vcaddq_rot270_s16(__a, __b) __arm_vcaddq_rot270_s16(__a, __b)
#define vbrsrq_n_s16(__a, __b) __arm_vbrsrq_n_s16(__a, __b)
#define vbicq_s16(__a, __b) __arm_vbicq_s16(__a, __b)
#define vandq_s16(__a, __b) __arm_vandq_s16(__a, __b)
#define vaddvaq_s16(__a, __b) __arm_vaddvaq_s16(__a, __b)
#define vaddq_n_s16(__a, __b) __arm_vaddq_n_s16(__a, __b)
#define vabdq_s16(__a, __b) __arm_vabdq_s16(__a, __b)
#define vshlq_n_s16(__a, __imm) __arm_vshlq_n_s16(__a, __imm)
#define vrshrq_n_s16(__a, __imm) __arm_vrshrq_n_s16(__a, __imm)
#define vqshlq_n_s16(__a, __imm) __arm_vqshlq_n_s16(__a, __imm)
/* uint32x4_t arithmetic/compare group and the start of the int32x4_t
   compares (the s32 group continues beyond this section); mirrors the
   u8/s8 groups element for element.  */
#define vsubq_u32(__a, __b) __arm_vsubq_u32(__a, __b)
#define vsubq_n_u32(__a, __b) __arm_vsubq_n_u32(__a, __b)
#define vrmulhq_u32(__a, __b) __arm_vrmulhq_u32(__a, __b)
#define vrhaddq_u32(__a, __b) __arm_vrhaddq_u32(__a, __b)
#define vqsubq_u32(__a, __b) __arm_vqsubq_u32(__a, __b)
#define vqsubq_n_u32(__a, __b) __arm_vqsubq_n_u32(__a, __b)
#define vqaddq_u32(__a, __b) __arm_vqaddq_u32(__a, __b)
#define vqaddq_n_u32(__a, __b) __arm_vqaddq_n_u32(__a, __b)
#define vorrq_u32(__a, __b) __arm_vorrq_u32(__a, __b)
#define vornq_u32(__a, __b) __arm_vornq_u32(__a, __b)
#define vmulq_u32(__a, __b) __arm_vmulq_u32(__a, __b)
#define vmulq_n_u32(__a, __b) __arm_vmulq_n_u32(__a, __b)
#define vmulltq_int_u32(__a, __b) __arm_vmulltq_int_u32(__a, __b)
#define vmullbq_int_u32(__a, __b) __arm_vmullbq_int_u32(__a, __b)
#define vmulhq_u32(__a, __b) __arm_vmulhq_u32(__a, __b)
#define vmladavq_u32(__a, __b) __arm_vmladavq_u32(__a, __b)
#define vminvq_u32(__a, __b) __arm_vminvq_u32(__a, __b)
#define vminq_u32(__a, __b) __arm_vminq_u32(__a, __b)
#define vmaxvq_u32(__a, __b) __arm_vmaxvq_u32(__a, __b)
#define vmaxq_u32(__a, __b) __arm_vmaxq_u32(__a, __b)
#define vhsubq_u32(__a, __b) __arm_vhsubq_u32(__a, __b)
#define vhsubq_n_u32(__a, __b) __arm_vhsubq_n_u32(__a, __b)
#define vhaddq_u32(__a, __b) __arm_vhaddq_u32(__a, __b)
#define vhaddq_n_u32(__a, __b) __arm_vhaddq_n_u32(__a, __b)
#define veorq_u32(__a, __b) __arm_veorq_u32(__a, __b)
#define vcmpneq_n_u32(__a, __b) __arm_vcmpneq_n_u32(__a, __b)
#define vcmphiq_u32(__a, __b) __arm_vcmphiq_u32(__a, __b)
#define vcmphiq_n_u32(__a, __b) __arm_vcmphiq_n_u32(__a, __b)
#define vcmpeqq_u32(__a, __b) __arm_vcmpeqq_u32(__a, __b)
#define vcmpeqq_n_u32(__a, __b) __arm_vcmpeqq_n_u32(__a, __b)
#define vcmpcsq_u32(__a, __b) __arm_vcmpcsq_u32(__a, __b)
#define vcmpcsq_n_u32(__a, __b) __arm_vcmpcsq_n_u32(__a, __b)
#define vcaddq_rot90_u32(__a, __b) __arm_vcaddq_rot90_u32(__a, __b)
#define vcaddq_rot270_u32(__a, __b) __arm_vcaddq_rot270_u32(__a, __b)
#define vbicq_u32(__a, __b) __arm_vbicq_u32(__a, __b)
#define vandq_u32(__a, __b) __arm_vandq_u32(__a, __b)
#define vaddvq_p_u32(__a, __p) __arm_vaddvq_p_u32(__a, __p)
#define vaddvaq_u32(__a, __b) __arm_vaddvaq_u32(__a, __b)
#define vaddq_n_u32(__a, __b) __arm_vaddq_n_u32(__a, __b)
#define vabdq_u32(__a, __b) __arm_vabdq_u32(__a, __b)
#define vshlq_r_u32(__a, __b) __arm_vshlq_r_u32(__a, __b)
#define vrshlq_u32(__a, __b) __arm_vrshlq_u32(__a, __b)
#define vrshlq_n_u32(__a, __b) __arm_vrshlq_n_u32(__a, __b)
#define vqshlq_u32(__a, __b) __arm_vqshlq_u32(__a, __b)
#define vqshlq_r_u32(__a, __b) __arm_vqshlq_r_u32(__a, __b)
#define vqrshlq_u32(__a, __b) __arm_vqrshlq_u32(__a, __b)
#define vqrshlq_n_u32(__a, __b) __arm_vqrshlq_n_u32(__a, __b)
#define vminavq_s32(__a, __b) __arm_vminavq_s32(__a, __b)
#define vminaq_s32(__a, __b) __arm_vminaq_s32(__a, __b)
#define vmaxavq_s32(__a, __b) __arm_vmaxavq_s32(__a, __b)
#define vmaxaq_s32(__a, __b) __arm_vmaxaq_s32(__a, __b)
#define vbrsrq_n_u32(__a, __b) __arm_vbrsrq_n_u32(__a, __b)
#define vshlq_n_u32(__a, __imm) __arm_vshlq_n_u32(__a, __imm)
#define vrshrq_n_u32(__a, __imm) __arm_vrshrq_n_u32(__a, __imm)
#define vqshlq_n_u32(__a, __imm) __arm_vqshlq_n_u32(__a, __imm)
#define vcmpneq_n_s32(__a, __b) __arm_vcmpneq_n_s32(__a, __b)
#define vcmpltq_s32(__a, __b) __arm_vcmpltq_s32(__a, __b)
#define vcmpltq_n_s32(__a, __b) __arm_vcmpltq_n_s32(__a, __b)
#define vcmpleq_s32(__a, __b) __arm_vcmpleq_s32(__a, __b)
#define vcmpleq_n_s32(__a, __b) __arm_vcmpleq_n_s32(__a, __b)
#define vcmpgtq_s32(__a, __b) __arm_vcmpgtq_s32(__a, __b)
#define vcmpgtq_n_s32(__a, __b) __arm_vcmpgtq_n_s32(__a, __b)
#define vcmpgeq_s32(__a, __b) __arm_vcmpgeq_s32(__a, __b)
#define vcmpgeq_n_s32(__a, __b) __arm_vcmpgeq_n_s32(__a, __b)
#define vcmpeqq_s32(__a, __b) __arm_vcmpeqq_s32(__a, __b)
#define vcmpeqq_n_s32(__a, __b) __arm_vcmpeqq_n_s32(__a, __b)
#define vqshluq_n_s32(__a, __imm) __arm_vqshluq_n_s32(__a, __imm)
1007 #define vaddvq_p_s32(__a, __p) __arm_vaddvq_p_s32(__a, __p)
1008 #define vsubq_s32(__a, __b) __arm_vsubq_s32(__a, __b)
1009 #define vsubq_n_s32(__a, __b) __arm_vsubq_n_s32(__a, __b)
1010 #define vshlq_r_s32(__a, __b) __arm_vshlq_r_s32(__a, __b)
1011 #define vrshlq_s32(__a, __b) __arm_vrshlq_s32(__a, __b)
1012 #define vrshlq_n_s32(__a, __b) __arm_vrshlq_n_s32(__a, __b)
1013 #define vrmulhq_s32(__a, __b) __arm_vrmulhq_s32(__a, __b)
1014 #define vrhaddq_s32(__a, __b) __arm_vrhaddq_s32(__a, __b)
1015 #define vqsubq_s32(__a, __b) __arm_vqsubq_s32(__a, __b)
1016 #define vqsubq_n_s32(__a, __b) __arm_vqsubq_n_s32(__a, __b)
1017 #define vqshlq_s32(__a, __b) __arm_vqshlq_s32(__a, __b)
1018 #define vqshlq_r_s32(__a, __b) __arm_vqshlq_r_s32(__a, __b)
1019 #define vqrshlq_s32(__a, __b) __arm_vqrshlq_s32(__a, __b)
1020 #define vqrshlq_n_s32(__a, __b) __arm_vqrshlq_n_s32(__a, __b)
1021 #define vqrdmulhq_s32(__a, __b) __arm_vqrdmulhq_s32(__a, __b)
1022 #define vqrdmulhq_n_s32(__a, __b) __arm_vqrdmulhq_n_s32(__a, __b)
1023 #define vqdmulhq_s32(__a, __b) __arm_vqdmulhq_s32(__a, __b)
1024 #define vqdmulhq_n_s32(__a, __b) __arm_vqdmulhq_n_s32(__a, __b)
1025 #define vqaddq_s32(__a, __b) __arm_vqaddq_s32(__a, __b)
1026 #define vqaddq_n_s32(__a, __b) __arm_vqaddq_n_s32(__a, __b)
1027 #define vorrq_s32(__a, __b) __arm_vorrq_s32(__a, __b)
1028 #define vornq_s32(__a, __b) __arm_vornq_s32(__a, __b)
1029 #define vmulq_s32(__a, __b) __arm_vmulq_s32(__a, __b)
1030 #define vmulq_n_s32(__a, __b) __arm_vmulq_n_s32(__a, __b)
1031 #define vmulltq_int_s32(__a, __b) __arm_vmulltq_int_s32(__a, __b)
1032 #define vmullbq_int_s32(__a, __b) __arm_vmullbq_int_s32(__a, __b)
1033 #define vmulhq_s32(__a, __b) __arm_vmulhq_s32(__a, __b)
1034 #define vmlsdavxq_s32(__a, __b) __arm_vmlsdavxq_s32(__a, __b)
1035 #define vmlsdavq_s32(__a, __b) __arm_vmlsdavq_s32(__a, __b)
1036 #define vmladavxq_s32(__a, __b) __arm_vmladavxq_s32(__a, __b)
1037 #define vmladavq_s32(__a, __b) __arm_vmladavq_s32(__a, __b)
1038 #define vminvq_s32(__a, __b) __arm_vminvq_s32(__a, __b)
1039 #define vminq_s32(__a, __b) __arm_vminq_s32(__a, __b)
1040 #define vmaxvq_s32(__a, __b) __arm_vmaxvq_s32(__a, __b)
1041 #define vmaxq_s32(__a, __b) __arm_vmaxq_s32(__a, __b)
1042 #define vhsubq_s32(__a, __b) __arm_vhsubq_s32(__a, __b)
1043 #define vhsubq_n_s32(__a, __b) __arm_vhsubq_n_s32(__a, __b)
1044 #define vhcaddq_rot90_s32(__a, __b) __arm_vhcaddq_rot90_s32(__a, __b)
1045 #define vhcaddq_rot270_s32(__a, __b) __arm_vhcaddq_rot270_s32(__a, __b)
1046 #define vhaddq_s32(__a, __b) __arm_vhaddq_s32(__a, __b)
1047 #define vhaddq_n_s32(__a, __b) __arm_vhaddq_n_s32(__a, __b)
1048 #define veorq_s32(__a, __b) __arm_veorq_s32(__a, __b)
1049 #define vcaddq_rot90_s32(__a, __b) __arm_vcaddq_rot90_s32(__a, __b)
1050 #define vcaddq_rot270_s32(__a, __b) __arm_vcaddq_rot270_s32(__a, __b)
1051 #define vbrsrq_n_s32(__a, __b) __arm_vbrsrq_n_s32(__a, __b)
1052 #define vbicq_s32(__a, __b) __arm_vbicq_s32(__a, __b)
1053 #define vandq_s32(__a, __b) __arm_vandq_s32(__a, __b)
1054 #define vaddvaq_s32(__a, __b) __arm_vaddvaq_s32(__a, __b)
1055 #define vaddq_n_s32(__a, __b) __arm_vaddq_n_s32(__a, __b)
1056 #define vabdq_s32(__a, __b) __arm_vabdq_s32(__a, __b)
1057 #define vshlq_n_s32(__a, __imm) __arm_vshlq_n_s32(__a, __imm)
1058 #define vrshrq_n_s32(__a, __imm) __arm_vrshrq_n_s32(__a, __imm)
1059 #define vqshlq_n_s32(__a, __imm) __arm_vqshlq_n_s32(__a, __imm)
/* 16-bit narrowing/widening moves, polynomial multiplies, long
   multiply-accumulate reductions, and float16 arithmetic/compare
   intrinsics: forward each user-namespace name to its __arm_*
   implementation.  (Stale line-number residue removed so the
   directives preprocess correctly.)  */
#define vqmovntq_u16(__a, __b) __arm_vqmovntq_u16(__a, __b)
#define vqmovnbq_u16(__a, __b) __arm_vqmovnbq_u16(__a, __b)
#define vmulltq_poly_p8(__a, __b) __arm_vmulltq_poly_p8(__a, __b)
#define vmullbq_poly_p8(__a, __b) __arm_vmullbq_poly_p8(__a, __b)
#define vmovntq_u16(__a, __b) __arm_vmovntq_u16(__a, __b)
#define vmovnbq_u16(__a, __b) __arm_vmovnbq_u16(__a, __b)
#define vmlaldavq_u16(__a, __b) __arm_vmlaldavq_u16(__a, __b)
#define vqmovuntq_s16(__a, __b) __arm_vqmovuntq_s16(__a, __b)
#define vqmovunbq_s16(__a, __b) __arm_vqmovunbq_s16(__a, __b)
#define vshlltq_n_u8(__a, __imm) __arm_vshlltq_n_u8(__a, __imm)
#define vshllbq_n_u8(__a, __imm) __arm_vshllbq_n_u8(__a, __imm)
#define vorrq_n_u16(__a, __imm) __arm_vorrq_n_u16(__a, __imm)
#define vbicq_n_u16(__a, __imm) __arm_vbicq_n_u16(__a, __imm)
/* Float16 compares and arithmetic.  */
#define vcmpneq_n_f16(__a, __b) __arm_vcmpneq_n_f16(__a, __b)
#define vcmpneq_f16(__a, __b) __arm_vcmpneq_f16(__a, __b)
#define vcmpltq_n_f16(__a, __b) __arm_vcmpltq_n_f16(__a, __b)
#define vcmpltq_f16(__a, __b) __arm_vcmpltq_f16(__a, __b)
#define vcmpleq_n_f16(__a, __b) __arm_vcmpleq_n_f16(__a, __b)
#define vcmpleq_f16(__a, __b) __arm_vcmpleq_f16(__a, __b)
#define vcmpgtq_n_f16(__a, __b) __arm_vcmpgtq_n_f16(__a, __b)
#define vcmpgtq_f16(__a, __b) __arm_vcmpgtq_f16(__a, __b)
#define vcmpgeq_n_f16(__a, __b) __arm_vcmpgeq_n_f16(__a, __b)
#define vcmpgeq_f16(__a, __b) __arm_vcmpgeq_f16(__a, __b)
#define vcmpeqq_n_f16(__a, __b) __arm_vcmpeqq_n_f16(__a, __b)
#define vcmpeqq_f16(__a, __b) __arm_vcmpeqq_f16(__a, __b)
#define vsubq_f16(__a, __b) __arm_vsubq_f16(__a, __b)
#define vqmovntq_s16(__a, __b) __arm_vqmovntq_s16(__a, __b)
#define vqmovnbq_s16(__a, __b) __arm_vqmovnbq_s16(__a, __b)
#define vqdmulltq_s16(__a, __b) __arm_vqdmulltq_s16(__a, __b)
#define vqdmulltq_n_s16(__a, __b) __arm_vqdmulltq_n_s16(__a, __b)
#define vqdmullbq_s16(__a, __b) __arm_vqdmullbq_s16(__a, __b)
#define vqdmullbq_n_s16(__a, __b) __arm_vqdmullbq_n_s16(__a, __b)
#define vorrq_f16(__a, __b) __arm_vorrq_f16(__a, __b)
#define vornq_f16(__a, __b) __arm_vornq_f16(__a, __b)
#define vmulq_n_f16(__a, __b) __arm_vmulq_n_f16(__a, __b)
#define vmulq_f16(__a, __b) __arm_vmulq_f16(__a, __b)
#define vmovntq_s16(__a, __b) __arm_vmovntq_s16(__a, __b)
#define vmovnbq_s16(__a, __b) __arm_vmovnbq_s16(__a, __b)
#define vmlsldavxq_s16(__a, __b) __arm_vmlsldavxq_s16(__a, __b)
#define vmlsldavq_s16(__a, __b) __arm_vmlsldavq_s16(__a, __b)
#define vmlaldavxq_s16(__a, __b) __arm_vmlaldavxq_s16(__a, __b)
#define vmlaldavq_s16(__a, __b) __arm_vmlaldavq_s16(__a, __b)
#define vminnmvq_f16(__a, __b) __arm_vminnmvq_f16(__a, __b)
#define vminnmq_f16(__a, __b) __arm_vminnmq_f16(__a, __b)
#define vminnmavq_f16(__a, __b) __arm_vminnmavq_f16(__a, __b)
#define vminnmaq_f16(__a, __b) __arm_vminnmaq_f16(__a, __b)
#define vmaxnmvq_f16(__a, __b) __arm_vmaxnmvq_f16(__a, __b)
#define vmaxnmq_f16(__a, __b) __arm_vmaxnmq_f16(__a, __b)
#define vmaxnmavq_f16(__a, __b) __arm_vmaxnmavq_f16(__a, __b)
#define vmaxnmaq_f16(__a, __b) __arm_vmaxnmaq_f16(__a, __b)
#define veorq_f16(__a, __b) __arm_veorq_f16(__a, __b)
#define vcmulq_rot90_f16(__a, __b) __arm_vcmulq_rot90_f16(__a, __b)
#define vcmulq_rot270_f16(__a, __b) __arm_vcmulq_rot270_f16(__a, __b)
#define vcmulq_rot180_f16(__a, __b) __arm_vcmulq_rot180_f16(__a, __b)
#define vcmulq_f16(__a, __b) __arm_vcmulq_f16(__a, __b)
#define vcaddq_rot90_f16(__a, __b) __arm_vcaddq_rot90_f16(__a, __b)
#define vcaddq_rot270_f16(__a, __b) __arm_vcaddq_rot270_f16(__a, __b)
#define vbicq_f16(__a, __b) __arm_vbicq_f16(__a, __b)
#define vandq_f16(__a, __b) __arm_vandq_f16(__a, __b)
#define vaddq_n_f16(__a, __b) __arm_vaddq_n_f16(__a, __b)
#define vabdq_f16(__a, __b) __arm_vabdq_f16(__a, __b)
#define vshlltq_n_s8(__a, __imm) __arm_vshlltq_n_s8(__a, __imm)
#define vshllbq_n_s8(__a, __imm) __arm_vshllbq_n_s8(__a, __imm)
#define vorrq_n_s16(__a, __imm) __arm_vorrq_n_s16(__a, __imm)
#define vbicq_n_s16(__a, __imm) __arm_vbicq_n_s16(__a, __imm)
/* 32-bit narrowing/widening moves, polynomial multiplies, long
   multiply-accumulate reductions, and float32 arithmetic/compare
   intrinsics: forward each user-namespace name to its __arm_*
   implementation.  Mirrors the f16 group above, one element size up.
   (Stale line-number residue removed so the directives preprocess
   correctly.)  */
#define vqmovntq_u32(__a, __b) __arm_vqmovntq_u32(__a, __b)
#define vqmovnbq_u32(__a, __b) __arm_vqmovnbq_u32(__a, __b)
#define vmulltq_poly_p16(__a, __b) __arm_vmulltq_poly_p16(__a, __b)
#define vmullbq_poly_p16(__a, __b) __arm_vmullbq_poly_p16(__a, __b)
#define vmovntq_u32(__a, __b) __arm_vmovntq_u32(__a, __b)
#define vmovnbq_u32(__a, __b) __arm_vmovnbq_u32(__a, __b)
#define vmlaldavq_u32(__a, __b) __arm_vmlaldavq_u32(__a, __b)
#define vqmovuntq_s32(__a, __b) __arm_vqmovuntq_s32(__a, __b)
#define vqmovunbq_s32(__a, __b) __arm_vqmovunbq_s32(__a, __b)
#define vshlltq_n_u16(__a, __imm) __arm_vshlltq_n_u16(__a, __imm)
#define vshllbq_n_u16(__a, __imm) __arm_vshllbq_n_u16(__a, __imm)
#define vorrq_n_u32(__a, __imm) __arm_vorrq_n_u32(__a, __imm)
#define vbicq_n_u32(__a, __imm) __arm_vbicq_n_u32(__a, __imm)
/* Float32 compares and arithmetic.  */
#define vcmpneq_n_f32(__a, __b) __arm_vcmpneq_n_f32(__a, __b)
#define vcmpneq_f32(__a, __b) __arm_vcmpneq_f32(__a, __b)
#define vcmpltq_n_f32(__a, __b) __arm_vcmpltq_n_f32(__a, __b)
#define vcmpltq_f32(__a, __b) __arm_vcmpltq_f32(__a, __b)
#define vcmpleq_n_f32(__a, __b) __arm_vcmpleq_n_f32(__a, __b)
#define vcmpleq_f32(__a, __b) __arm_vcmpleq_f32(__a, __b)
#define vcmpgtq_n_f32(__a, __b) __arm_vcmpgtq_n_f32(__a, __b)
#define vcmpgtq_f32(__a, __b) __arm_vcmpgtq_f32(__a, __b)
#define vcmpgeq_n_f32(__a, __b) __arm_vcmpgeq_n_f32(__a, __b)
#define vcmpgeq_f32(__a, __b) __arm_vcmpgeq_f32(__a, __b)
#define vcmpeqq_n_f32(__a, __b) __arm_vcmpeqq_n_f32(__a, __b)
#define vcmpeqq_f32(__a, __b) __arm_vcmpeqq_f32(__a, __b)
#define vsubq_f32(__a, __b) __arm_vsubq_f32(__a, __b)
#define vqmovntq_s32(__a, __b) __arm_vqmovntq_s32(__a, __b)
#define vqmovnbq_s32(__a, __b) __arm_vqmovnbq_s32(__a, __b)
#define vqdmulltq_s32(__a, __b) __arm_vqdmulltq_s32(__a, __b)
#define vqdmulltq_n_s32(__a, __b) __arm_vqdmulltq_n_s32(__a, __b)
#define vqdmullbq_s32(__a, __b) __arm_vqdmullbq_s32(__a, __b)
#define vqdmullbq_n_s32(__a, __b) __arm_vqdmullbq_n_s32(__a, __b)
#define vorrq_f32(__a, __b) __arm_vorrq_f32(__a, __b)
#define vornq_f32(__a, __b) __arm_vornq_f32(__a, __b)
#define vmulq_n_f32(__a, __b) __arm_vmulq_n_f32(__a, __b)
#define vmulq_f32(__a, __b) __arm_vmulq_f32(__a, __b)
#define vmovntq_s32(__a, __b) __arm_vmovntq_s32(__a, __b)
#define vmovnbq_s32(__a, __b) __arm_vmovnbq_s32(__a, __b)
#define vmlsldavxq_s32(__a, __b) __arm_vmlsldavxq_s32(__a, __b)
#define vmlsldavq_s32(__a, __b) __arm_vmlsldavq_s32(__a, __b)
#define vmlaldavxq_s32(__a, __b) __arm_vmlaldavxq_s32(__a, __b)
#define vmlaldavq_s32(__a, __b) __arm_vmlaldavq_s32(__a, __b)
#define vminnmvq_f32(__a, __b) __arm_vminnmvq_f32(__a, __b)
#define vminnmq_f32(__a, __b) __arm_vminnmq_f32(__a, __b)
#define vminnmavq_f32(__a, __b) __arm_vminnmavq_f32(__a, __b)
#define vminnmaq_f32(__a, __b) __arm_vminnmaq_f32(__a, __b)
#define vmaxnmvq_f32(__a, __b) __arm_vmaxnmvq_f32(__a, __b)
#define vmaxnmq_f32(__a, __b) __arm_vmaxnmq_f32(__a, __b)
#define vmaxnmavq_f32(__a, __b) __arm_vmaxnmavq_f32(__a, __b)
#define vmaxnmaq_f32(__a, __b) __arm_vmaxnmaq_f32(__a, __b)
#define veorq_f32(__a, __b) __arm_veorq_f32(__a, __b)
#define vcmulq_rot90_f32(__a, __b) __arm_vcmulq_rot90_f32(__a, __b)
#define vcmulq_rot270_f32(__a, __b) __arm_vcmulq_rot270_f32(__a, __b)
#define vcmulq_rot180_f32(__a, __b) __arm_vcmulq_rot180_f32(__a, __b)
#define vcmulq_f32(__a, __b) __arm_vcmulq_f32(__a, __b)
#define vcaddq_rot90_f32(__a, __b) __arm_vcaddq_rot90_f32(__a, __b)
#define vcaddq_rot270_f32(__a, __b) __arm_vcaddq_rot270_f32(__a, __b)
#define vbicq_f32(__a, __b) __arm_vbicq_f32(__a, __b)
#define vandq_f32(__a, __b) __arm_vandq_f32(__a, __b)
#define vaddq_n_f32(__a, __b) __arm_vaddq_n_f32(__a, __b)
#define vabdq_f32(__a, __b) __arm_vabdq_f32(__a, __b)
#define vshlltq_n_s16(__a, __imm) __arm_vshlltq_n_s16(__a, __imm)
#define vshllbq_n_s16(__a, __imm) __arm_vshllbq_n_s16(__a, __imm)
#define vorrq_n_s32(__a, __imm) __arm_vorrq_n_s32(__a, __imm)
#define vbicq_n_s32(__a, __imm) __arm_vbicq_n_s32(__a, __imm)
/* High-half long multiply-accumulate reductions, predicated tail
   predication (vctp*q_m), float<->int conversions, narrowing shifts,
   carry shifts (vshlcq), and absolute-difference accumulate (vabavq):
   forward each user-namespace name to its __arm_* implementation.
   (Stale line-number residue removed so the directives preprocess
   correctly.)  */
#define vrmlaldavhq_u32(__a, __b) __arm_vrmlaldavhq_u32(__a, __b)
#define vctp8q_m(__a, __p) __arm_vctp8q_m(__a, __p)
#define vctp64q_m(__a, __p) __arm_vctp64q_m(__a, __p)
#define vctp32q_m(__a, __p) __arm_vctp32q_m(__a, __p)
#define vctp16q_m(__a, __p) __arm_vctp16q_m(__a, __p)
#define vaddlvaq_u32(__a, __b) __arm_vaddlvaq_u32(__a, __b)
#define vrmlsldavhxq_s32(__a, __b) __arm_vrmlsldavhxq_s32(__a, __b)
#define vrmlsldavhq_s32(__a, __b) __arm_vrmlsldavhq_s32(__a, __b)
#define vrmlaldavhxq_s32(__a, __b) __arm_vrmlaldavhxq_s32(__a, __b)
#define vrmlaldavhq_s32(__a, __b) __arm_vrmlaldavhq_s32(__a, __b)
#define vcvttq_f16_f32(__a, __b) __arm_vcvttq_f16_f32(__a, __b)
#define vcvtbq_f16_f32(__a, __b) __arm_vcvtbq_f16_f32(__a, __b)
#define vaddlvaq_s32(__a, __b) __arm_vaddlvaq_s32(__a, __b)
#define vabavq_s8(__a, __b, __c) __arm_vabavq_s8(__a, __b, __c)
#define vabavq_s16(__a, __b, __c) __arm_vabavq_s16(__a, __b, __c)
#define vabavq_s32(__a, __b, __c) __arm_vabavq_s32(__a, __b, __c)
#define vbicq_m_n_s16(__a, __imm, __p) __arm_vbicq_m_n_s16(__a, __imm, __p)
#define vbicq_m_n_s32(__a, __imm, __p) __arm_vbicq_m_n_s32(__a, __imm, __p)
#define vbicq_m_n_u16(__a, __imm, __p) __arm_vbicq_m_n_u16(__a, __imm, __p)
#define vbicq_m_n_u32(__a, __imm, __p) __arm_vbicq_m_n_u32(__a, __imm, __p)
#define vcmpeqq_m_f16(__a, __b, __p) __arm_vcmpeqq_m_f16(__a, __b, __p)
#define vcmpeqq_m_f32(__a, __b, __p) __arm_vcmpeqq_m_f32(__a, __b, __p)
#define vcvtaq_m_s16_f16(__inactive, __a, __p) __arm_vcvtaq_m_s16_f16(__inactive, __a, __p)
#define vcvtaq_m_u16_f16(__inactive, __a, __p) __arm_vcvtaq_m_u16_f16(__inactive, __a, __p)
#define vcvtaq_m_s32_f32(__inactive, __a, __p) __arm_vcvtaq_m_s32_f32(__inactive, __a, __p)
#define vcvtaq_m_u32_f32(__inactive, __a, __p) __arm_vcvtaq_m_u32_f32(__inactive, __a, __p)
#define vcvtq_m_f16_s16(__inactive, __a, __p) __arm_vcvtq_m_f16_s16(__inactive, __a, __p)
#define vcvtq_m_f16_u16(__inactive, __a, __p) __arm_vcvtq_m_f16_u16(__inactive, __a, __p)
#define vcvtq_m_f32_s32(__inactive, __a, __p) __arm_vcvtq_m_f32_s32(__inactive, __a, __p)
#define vcvtq_m_f32_u32(__inactive, __a, __p) __arm_vcvtq_m_f32_u32(__inactive, __a, __p)
#define vqrshrnbq_n_s16(__a, __b, __imm) __arm_vqrshrnbq_n_s16(__a, __b, __imm)
#define vqrshrnbq_n_u16(__a, __b, __imm) __arm_vqrshrnbq_n_u16(__a, __b, __imm)
#define vqrshrnbq_n_s32(__a, __b, __imm) __arm_vqrshrnbq_n_s32(__a, __b, __imm)
#define vqrshrnbq_n_u32(__a, __b, __imm) __arm_vqrshrnbq_n_u32(__a, __b, __imm)
#define vqrshrunbq_n_s16(__a, __b, __imm) __arm_vqrshrunbq_n_s16(__a, __b, __imm)
#define vqrshrunbq_n_s32(__a, __b, __imm) __arm_vqrshrunbq_n_s32(__a, __b, __imm)
#define vrmlaldavhaq_s32(__a, __b, __c) __arm_vrmlaldavhaq_s32(__a, __b, __c)
#define vrmlaldavhaq_u32(__a, __b, __c) __arm_vrmlaldavhaq_u32(__a, __b, __c)
#define vshlcq_s8(__a, __b, __imm) __arm_vshlcq_s8(__a, __b, __imm)
#define vshlcq_u8(__a, __b, __imm) __arm_vshlcq_u8(__a, __b, __imm)
#define vshlcq_s16(__a, __b, __imm) __arm_vshlcq_s16(__a, __b, __imm)
#define vshlcq_u16(__a, __b, __imm) __arm_vshlcq_u16(__a, __b, __imm)
#define vshlcq_s32(__a, __b, __imm) __arm_vshlcq_s32(__a, __b, __imm)
#define vshlcq_u32(__a, __b, __imm) __arm_vshlcq_u32(__a, __b, __imm)
#define vabavq_u8(__a, __b, __c) __arm_vabavq_u8(__a, __b, __c)
#define vabavq_u16(__a, __b, __c) __arm_vabavq_u16(__a, __b, __c)
#define vabavq_u32(__a, __b, __c) __arm_vabavq_u32(__a, __b, __c)
1237 #define vpselq_u8(__a, __b, __p) __arm_vpselq_u8(__a, __b, __p)
1238 #define vpselq_s8(__a, __b, __p) __arm_vpselq_s8(__a, __b, __p)
1239 #define vrev64q_m_u8(__inactive, __a, __p) __arm_vrev64q_m_u8(__inactive, __a, __p)
1240 #define vmvnq_m_u8(__inactive, __a, __p) __arm_vmvnq_m_u8(__inactive, __a, __p)
1241 #define vmlasq_n_u8(__a, __b, __c) __arm_vmlasq_n_u8(__a, __b, __c)
1242 #define vmlaq_n_u8(__a, __b, __c) __arm_vmlaq_n_u8(__a, __b, __c)
1243 #define vmladavq_p_u8(__a, __b, __p) __arm_vmladavq_p_u8(__a, __b, __p)
1244 #define vmladavaq_u8(__a, __b, __c) __arm_vmladavaq_u8(__a, __b, __c)
1245 #define vminvq_p_u8(__a, __b, __p) __arm_vminvq_p_u8(__a, __b, __p)
1246 #define vmaxvq_p_u8(__a, __b, __p) __arm_vmaxvq_p_u8(__a, __b, __p)
1247 #define vdupq_m_n_u8(__inactive, __a, __p) __arm_vdupq_m_n_u8(__inactive, __a, __p)
1248 #define vcmpneq_m_u8(__a, __b, __p) __arm_vcmpneq_m_u8(__a, __b, __p)
1249 #define vcmpneq_m_n_u8(__a, __b, __p) __arm_vcmpneq_m_n_u8(__a, __b, __p)
1250 #define vcmphiq_m_u8(__a, __b, __p) __arm_vcmphiq_m_u8(__a, __b, __p)
1251 #define vcmphiq_m_n_u8(__a, __b, __p) __arm_vcmphiq_m_n_u8(__a, __b, __p)
1252 #define vcmpeqq_m_u8(__a, __b, __p) __arm_vcmpeqq_m_u8(__a, __b, __p)
1253 #define vcmpeqq_m_n_u8(__a, __b, __p) __arm_vcmpeqq_m_n_u8(__a, __b, __p)
1254 #define vcmpcsq_m_u8(__a, __b, __p) __arm_vcmpcsq_m_u8(__a, __b, __p)
1255 #define vcmpcsq_m_n_u8(__a, __b, __p) __arm_vcmpcsq_m_n_u8(__a, __b, __p)
1256 #define vclzq_m_u8(__inactive, __a, __p) __arm_vclzq_m_u8(__inactive, __a, __p)
1257 #define vaddvaq_p_u8(__a, __b, __p) __arm_vaddvaq_p_u8(__a, __b, __p)
1258 #define vsriq_n_u8(__a, __b, __imm) __arm_vsriq_n_u8(__a, __b, __imm)
1259 #define vsliq_n_u8(__a, __b, __imm) __arm_vsliq_n_u8(__a, __b, __imm)
1260 #define vshlq_m_r_u8(__a, __b, __p) __arm_vshlq_m_r_u8(__a, __b, __p)
1261 #define vrshlq_m_n_u8(__a, __b, __p) __arm_vrshlq_m_n_u8(__a, __b, __p)
1262 #define vqshlq_m_r_u8(__a, __b, __p) __arm_vqshlq_m_r_u8(__a, __b, __p)
1263 #define vqrshlq_m_n_u8(__a, __b, __p) __arm_vqrshlq_m_n_u8(__a, __b, __p)
1264 #define vminavq_p_s8(__a, __b, __p) __arm_vminavq_p_s8(__a, __b, __p)
1265 #define vminaq_m_s8(__a, __b, __p) __arm_vminaq_m_s8(__a, __b, __p)
1266 #define vmaxavq_p_s8(__a, __b, __p) __arm_vmaxavq_p_s8(__a, __b, __p)
1267 #define vmaxaq_m_s8(__a, __b, __p) __arm_vmaxaq_m_s8(__a, __b, __p)
1268 #define vcmpneq_m_s8(__a, __b, __p) __arm_vcmpneq_m_s8(__a, __b, __p)
1269 #define vcmpneq_m_n_s8(__a, __b, __p) __arm_vcmpneq_m_n_s8(__a, __b, __p)
1270 #define vcmpltq_m_s8(__a, __b, __p) __arm_vcmpltq_m_s8(__a, __b, __p)
1271 #define vcmpltq_m_n_s8(__a, __b, __p) __arm_vcmpltq_m_n_s8(__a, __b, __p)
1272 #define vcmpleq_m_s8(__a, __b, __p) __arm_vcmpleq_m_s8(__a, __b, __p)
1273 #define vcmpleq_m_n_s8(__a, __b, __p) __arm_vcmpleq_m_n_s8(__a, __b, __p)
1274 #define vcmpgtq_m_s8(__a, __b, __p) __arm_vcmpgtq_m_s8(__a, __b, __p)
1275 #define vcmpgtq_m_n_s8(__a, __b, __p) __arm_vcmpgtq_m_n_s8(__a, __b, __p)
1276 #define vcmpgeq_m_s8(__a, __b, __p) __arm_vcmpgeq_m_s8(__a, __b, __p)
1277 #define vcmpgeq_m_n_s8(__a, __b, __p) __arm_vcmpgeq_m_n_s8(__a, __b, __p)
1278 #define vcmpeqq_m_s8(__a, __b, __p) __arm_vcmpeqq_m_s8(__a, __b, __p)
1279 #define vcmpeqq_m_n_s8(__a, __b, __p) __arm_vcmpeqq_m_n_s8(__a, __b, __p)
1280 #define vshlq_m_r_s8(__a, __b, __p) __arm_vshlq_m_r_s8(__a, __b, __p)
1281 #define vrshlq_m_n_s8(__a, __b, __p) __arm_vrshlq_m_n_s8(__a, __b, __p)
1282 #define vrev64q_m_s8(__inactive, __a, __p) __arm_vrev64q_m_s8(__inactive, __a, __p)
1283 #define vqshlq_m_r_s8(__a, __b, __p) __arm_vqshlq_m_r_s8(__a, __b, __p)
1284 #define vqrshlq_m_n_s8(__a, __b, __p) __arm_vqrshlq_m_n_s8(__a, __b, __p)
1285 #define vqnegq_m_s8(__inactive, __a, __p) __arm_vqnegq_m_s8(__inactive, __a, __p)
1286 #define vqabsq_m_s8(__inactive, __a, __p) __arm_vqabsq_m_s8(__inactive, __a, __p)
1287 #define vnegq_m_s8(__inactive, __a, __p) __arm_vnegq_m_s8(__inactive, __a, __p)
1288 #define vmvnq_m_s8(__inactive, __a, __p) __arm_vmvnq_m_s8(__inactive, __a, __p)
1289 #define vmlsdavxq_p_s8(__a, __b, __p) __arm_vmlsdavxq_p_s8(__a, __b, __p)
1290 #define vmlsdavq_p_s8(__a, __b, __p) __arm_vmlsdavq_p_s8(__a, __b, __p)
1291 #define vmladavxq_p_s8(__a, __b, __p) __arm_vmladavxq_p_s8(__a, __b, __p)
1292 #define vmladavq_p_s8(__a, __b, __p) __arm_vmladavq_p_s8(__a, __b, __p)
1293 #define vminvq_p_s8(__a, __b, __p) __arm_vminvq_p_s8(__a, __b, __p)
1294 #define vmaxvq_p_s8(__a, __b, __p) __arm_vmaxvq_p_s8(__a, __b, __p)
1295 #define vdupq_m_n_s8(__inactive, __a, __p) __arm_vdupq_m_n_s8(__inactive, __a, __p)
1296 #define vclzq_m_s8(__inactive, __a, __p) __arm_vclzq_m_s8(__inactive, __a, __p)
1297 #define vclsq_m_s8(__inactive, __a, __p) __arm_vclsq_m_s8(__inactive, __a, __p)
1298 #define vaddvaq_p_s8(__a, __b, __p) __arm_vaddvaq_p_s8(__a, __b, __p)
1299 #define vabsq_m_s8(__inactive, __a, __p) __arm_vabsq_m_s8(__inactive, __a, __p)
1300 #define vqrdmlsdhxq_s8(__inactive, __a, __b) __arm_vqrdmlsdhxq_s8(__inactive, __a, __b)
1301 #define vqrdmlsdhq_s8(__inactive, __a, __b) __arm_vqrdmlsdhq_s8(__inactive, __a, __b)
1302 #define vqrdmlashq_n_s8(__a, __b, __c) __arm_vqrdmlashq_n_s8(__a, __b, __c)
1303 #define vqrdmlahq_n_s8(__a, __b, __c) __arm_vqrdmlahq_n_s8(__a, __b, __c)
1304 #define vqrdmladhxq_s8(__inactive, __a, __b) __arm_vqrdmladhxq_s8(__inactive, __a, __b)
1305 #define vqrdmladhq_s8(__inactive, __a, __b) __arm_vqrdmladhq_s8(__inactive, __a, __b)
1306 #define vqdmlsdhxq_s8(__inactive, __a, __b) __arm_vqdmlsdhxq_s8(__inactive, __a, __b)
1307 #define vqdmlsdhq_s8(__inactive, __a, __b) __arm_vqdmlsdhq_s8(__inactive, __a, __b)
1308 #define vqdmlahq_n_s8(__a, __b, __c) __arm_vqdmlahq_n_s8(__a, __b, __c)
1309 #define vqdmlashq_n_s8(__a, __b, __c) __arm_vqdmlashq_n_s8(__a, __b, __c)
1310 #define vqdmladhxq_s8(__inactive, __a, __b) __arm_vqdmladhxq_s8(__inactive, __a, __b)
1311 #define vqdmladhq_s8(__inactive, __a, __b) __arm_vqdmladhq_s8(__inactive, __a, __b)
1312 #define vmlsdavaxq_s8(__a, __b, __c) __arm_vmlsdavaxq_s8(__a, __b, __c)
1313 #define vmlsdavaq_s8(__a, __b, __c) __arm_vmlsdavaq_s8(__a, __b, __c)
1314 #define vmlasq_n_s8(__a, __b, __c) __arm_vmlasq_n_s8(__a, __b, __c)
1315 #define vmlaq_n_s8(__a, __b, __c) __arm_vmlaq_n_s8(__a, __b, __c)
1316 #define vmladavaxq_s8(__a, __b, __c) __arm_vmladavaxq_s8(__a, __b, __c)
1317 #define vmladavaq_s8(__a, __b, __c) __arm_vmladavaq_s8(__a, __b, __c)
1318 #define vsriq_n_s8(__a, __b, __imm) __arm_vsriq_n_s8(__a, __b, __imm)
1319 #define vsliq_n_s8(__a, __b, __imm) __arm_vsliq_n_s8(__a, __b, __imm)
/* Predicated (_m/_p) and ternary 16- and 32-bit element intrinsics,
   mirroring the 8-bit group above: predicate select, reversals, masked
   compares, masked shifts, multiply accumulate, and shift-insert.
   Each user-namespace name forwards to its __arm_* implementation.
   (Stale line-number residue removed so the directives preprocess
   correctly.)  */
#define vpselq_u16(__a, __b, __p) __arm_vpselq_u16(__a, __b, __p)
#define vpselq_s16(__a, __b, __p) __arm_vpselq_s16(__a, __b, __p)
#define vrev64q_m_u16(__inactive, __a, __p) __arm_vrev64q_m_u16(__inactive, __a, __p)
#define vmvnq_m_u16(__inactive, __a, __p) __arm_vmvnq_m_u16(__inactive, __a, __p)
#define vmlasq_n_u16(__a, __b, __c) __arm_vmlasq_n_u16(__a, __b, __c)
#define vmlaq_n_u16(__a, __b, __c) __arm_vmlaq_n_u16(__a, __b, __c)
#define vmladavq_p_u16(__a, __b, __p) __arm_vmladavq_p_u16(__a, __b, __p)
#define vmladavaq_u16(__a, __b, __c) __arm_vmladavaq_u16(__a, __b, __c)
#define vminvq_p_u16(__a, __b, __p) __arm_vminvq_p_u16(__a, __b, __p)
#define vmaxvq_p_u16(__a, __b, __p) __arm_vmaxvq_p_u16(__a, __b, __p)
#define vdupq_m_n_u16(__inactive, __a, __p) __arm_vdupq_m_n_u16(__inactive, __a, __p)
#define vcmpneq_m_u16(__a, __b, __p) __arm_vcmpneq_m_u16(__a, __b, __p)
#define vcmpneq_m_n_u16(__a, __b, __p) __arm_vcmpneq_m_n_u16(__a, __b, __p)
#define vcmphiq_m_u16(__a, __b, __p) __arm_vcmphiq_m_u16(__a, __b, __p)
#define vcmphiq_m_n_u16(__a, __b, __p) __arm_vcmphiq_m_n_u16(__a, __b, __p)
#define vcmpeqq_m_u16(__a, __b, __p) __arm_vcmpeqq_m_u16(__a, __b, __p)
#define vcmpeqq_m_n_u16(__a, __b, __p) __arm_vcmpeqq_m_n_u16(__a, __b, __p)
#define vcmpcsq_m_u16(__a, __b, __p) __arm_vcmpcsq_m_u16(__a, __b, __p)
#define vcmpcsq_m_n_u16(__a, __b, __p) __arm_vcmpcsq_m_n_u16(__a, __b, __p)
#define vclzq_m_u16(__inactive, __a, __p) __arm_vclzq_m_u16(__inactive, __a, __p)
#define vaddvaq_p_u16(__a, __b, __p) __arm_vaddvaq_p_u16(__a, __b, __p)
#define vsriq_n_u16(__a, __b, __imm) __arm_vsriq_n_u16(__a, __b, __imm)
#define vsliq_n_u16(__a, __b, __imm) __arm_vsliq_n_u16(__a, __b, __imm)
#define vshlq_m_r_u16(__a, __b, __p) __arm_vshlq_m_r_u16(__a, __b, __p)
#define vrshlq_m_n_u16(__a, __b, __p) __arm_vrshlq_m_n_u16(__a, __b, __p)
#define vqshlq_m_r_u16(__a, __b, __p) __arm_vqshlq_m_r_u16(__a, __b, __p)
#define vqrshlq_m_n_u16(__a, __b, __p) __arm_vqrshlq_m_n_u16(__a, __b, __p)
#define vminavq_p_s16(__a, __b, __p) __arm_vminavq_p_s16(__a, __b, __p)
#define vminaq_m_s16(__a, __b, __p) __arm_vminaq_m_s16(__a, __b, __p)
#define vmaxavq_p_s16(__a, __b, __p) __arm_vmaxavq_p_s16(__a, __b, __p)
#define vmaxaq_m_s16(__a, __b, __p) __arm_vmaxaq_m_s16(__a, __b, __p)
#define vcmpneq_m_s16(__a, __b, __p) __arm_vcmpneq_m_s16(__a, __b, __p)
#define vcmpneq_m_n_s16(__a, __b, __p) __arm_vcmpneq_m_n_s16(__a, __b, __p)
#define vcmpltq_m_s16(__a, __b, __p) __arm_vcmpltq_m_s16(__a, __b, __p)
#define vcmpltq_m_n_s16(__a, __b, __p) __arm_vcmpltq_m_n_s16(__a, __b, __p)
#define vcmpleq_m_s16(__a, __b, __p) __arm_vcmpleq_m_s16(__a, __b, __p)
#define vcmpleq_m_n_s16(__a, __b, __p) __arm_vcmpleq_m_n_s16(__a, __b, __p)
#define vcmpgtq_m_s16(__a, __b, __p) __arm_vcmpgtq_m_s16(__a, __b, __p)
#define vcmpgtq_m_n_s16(__a, __b, __p) __arm_vcmpgtq_m_n_s16(__a, __b, __p)
#define vcmpgeq_m_s16(__a, __b, __p) __arm_vcmpgeq_m_s16(__a, __b, __p)
#define vcmpgeq_m_n_s16(__a, __b, __p) __arm_vcmpgeq_m_n_s16(__a, __b, __p)
#define vcmpeqq_m_s16(__a, __b, __p) __arm_vcmpeqq_m_s16(__a, __b, __p)
#define vcmpeqq_m_n_s16(__a, __b, __p) __arm_vcmpeqq_m_n_s16(__a, __b, __p)
#define vshlq_m_r_s16(__a, __b, __p) __arm_vshlq_m_r_s16(__a, __b, __p)
#define vrshlq_m_n_s16(__a, __b, __p) __arm_vrshlq_m_n_s16(__a, __b, __p)
#define vrev64q_m_s16(__inactive, __a, __p) __arm_vrev64q_m_s16(__inactive, __a, __p)
#define vqshlq_m_r_s16(__a, __b, __p) __arm_vqshlq_m_r_s16(__a, __b, __p)
#define vqrshlq_m_n_s16(__a, __b, __p) __arm_vqrshlq_m_n_s16(__a, __b, __p)
#define vqnegq_m_s16(__inactive, __a, __p) __arm_vqnegq_m_s16(__inactive, __a, __p)
#define vqabsq_m_s16(__inactive, __a, __p) __arm_vqabsq_m_s16(__inactive, __a, __p)
#define vnegq_m_s16(__inactive, __a, __p) __arm_vnegq_m_s16(__inactive, __a, __p)
#define vmvnq_m_s16(__inactive, __a, __p) __arm_vmvnq_m_s16(__inactive, __a, __p)
#define vmlsdavxq_p_s16(__a, __b, __p) __arm_vmlsdavxq_p_s16(__a, __b, __p)
#define vmlsdavq_p_s16(__a, __b, __p) __arm_vmlsdavq_p_s16(__a, __b, __p)
#define vmladavxq_p_s16(__a, __b, __p) __arm_vmladavxq_p_s16(__a, __b, __p)
#define vmladavq_p_s16(__a, __b, __p) __arm_vmladavq_p_s16(__a, __b, __p)
#define vminvq_p_s16(__a, __b, __p) __arm_vminvq_p_s16(__a, __b, __p)
#define vmaxvq_p_s16(__a, __b, __p) __arm_vmaxvq_p_s16(__a, __b, __p)
#define vdupq_m_n_s16(__inactive, __a, __p) __arm_vdupq_m_n_s16(__inactive, __a, __p)
#define vclzq_m_s16(__inactive, __a, __p) __arm_vclzq_m_s16(__inactive, __a, __p)
#define vclsq_m_s16(__inactive, __a, __p) __arm_vclsq_m_s16(__inactive, __a, __p)
#define vaddvaq_p_s16(__a, __b, __p) __arm_vaddvaq_p_s16(__a, __b, __p)
#define vabsq_m_s16(__inactive, __a, __p) __arm_vabsq_m_s16(__inactive, __a, __p)
#define vqrdmlsdhxq_s16(__inactive, __a, __b) __arm_vqrdmlsdhxq_s16(__inactive, __a, __b)
#define vqrdmlsdhq_s16(__inactive, __a, __b) __arm_vqrdmlsdhq_s16(__inactive, __a, __b)
#define vqrdmlashq_n_s16(__a, __b, __c) __arm_vqrdmlashq_n_s16(__a, __b, __c)
#define vqrdmlahq_n_s16(__a, __b, __c) __arm_vqrdmlahq_n_s16(__a, __b, __c)
#define vqrdmladhxq_s16(__inactive, __a, __b) __arm_vqrdmladhxq_s16(__inactive, __a, __b)
#define vqrdmladhq_s16(__inactive, __a, __b) __arm_vqrdmladhq_s16(__inactive, __a, __b)
#define vqdmlsdhxq_s16(__inactive, __a, __b) __arm_vqdmlsdhxq_s16(__inactive, __a, __b)
#define vqdmlsdhq_s16(__inactive, __a, __b) __arm_vqdmlsdhq_s16(__inactive, __a, __b)
#define vqdmlashq_n_s16(__a, __b, __c) __arm_vqdmlashq_n_s16(__a, __b, __c)
#define vqdmlahq_n_s16(__a, __b, __c) __arm_vqdmlahq_n_s16(__a, __b, __c)
#define vqdmladhxq_s16(__inactive, __a, __b) __arm_vqdmladhxq_s16(__inactive, __a, __b)
#define vqdmladhq_s16(__inactive, __a, __b) __arm_vqdmladhq_s16(__inactive, __a, __b)
#define vmlsdavaxq_s16(__a, __b, __c) __arm_vmlsdavaxq_s16(__a, __b, __c)
#define vmlsdavaq_s16(__a, __b, __c) __arm_vmlsdavaq_s16(__a, __b, __c)
#define vmlasq_n_s16(__a, __b, __c) __arm_vmlasq_n_s16(__a, __b, __c)
#define vmlaq_n_s16(__a, __b, __c) __arm_vmlaq_n_s16(__a, __b, __c)
#define vmladavaxq_s16(__a, __b, __c) __arm_vmladavaxq_s16(__a, __b, __c)
#define vmladavaq_s16(__a, __b, __c) __arm_vmladavaq_s16(__a, __b, __c)
#define vsriq_n_s16(__a, __b, __imm) __arm_vsriq_n_s16(__a, __b, __imm)
#define vsliq_n_s16(__a, __b, __imm) __arm_vsliq_n_s16(__a, __b, __imm)
/* 32-bit element variants (group continues beyond this point).  */
#define vpselq_u32(__a, __b, __p) __arm_vpselq_u32(__a, __b, __p)
#define vpselq_s32(__a, __b, __p) __arm_vpselq_s32(__a, __b, __p)
#define vrev64q_m_u32(__inactive, __a, __p) __arm_vrev64q_m_u32(__inactive, __a, __p)
#define vmvnq_m_u32(__inactive, __a, __p) __arm_vmvnq_m_u32(__inactive, __a, __p)
#define vmlasq_n_u32(__a, __b, __c) __arm_vmlasq_n_u32(__a, __b, __c)
#define vmlaq_n_u32(__a, __b, __c) __arm_vmlaq_n_u32(__a, __b, __c)
#define vmladavq_p_u32(__a, __b, __p) __arm_vmladavq_p_u32(__a, __b, __p)
#define vmladavaq_u32(__a, __b, __c) __arm_vmladavaq_u32(__a, __b, __c)
#define vminvq_p_u32(__a, __b, __p) __arm_vminvq_p_u32(__a, __b, __p)
#define vmaxvq_p_u32(__a, __b, __p) __arm_vmaxvq_p_u32(__a, __b, __p)
#define vdupq_m_n_u32(__inactive, __a, __p) __arm_vdupq_m_n_u32(__inactive, __a, __p)
#define vcmpneq_m_u32(__a, __b, __p) __arm_vcmpneq_m_u32(__a, __b, __p)
#define vcmpneq_m_n_u32(__a, __b, __p) __arm_vcmpneq_m_n_u32(__a, __b, __p)
#define vcmphiq_m_u32(__a, __b, __p) __arm_vcmphiq_m_u32(__a, __b, __p)
#define vcmphiq_m_n_u32(__a, __b, __p) __arm_vcmphiq_m_n_u32(__a, __b, __p)
#define vcmpeqq_m_u32(__a, __b, __p) __arm_vcmpeqq_m_u32(__a, __b, __p)
#define vcmpeqq_m_n_u32(__a, __b, __p) __arm_vcmpeqq_m_n_u32(__a, __b, __p)
#define vcmpcsq_m_u32(__a, __b, __p) __arm_vcmpcsq_m_u32(__a, __b, __p)
#define vcmpcsq_m_n_u32(__a, __b, __p) __arm_vcmpcsq_m_n_u32(__a, __b, __p)
#define vclzq_m_u32(__inactive, __a, __p) __arm_vclzq_m_u32(__inactive, __a, __p)
#define vaddvaq_p_u32(__a, __b, __p) __arm_vaddvaq_p_u32(__a, __b, __p)
#define vsriq_n_u32(__a, __b, __imm) __arm_vsriq_n_u32(__a, __b, __imm)
#define vsliq_n_u32(__a, __b, __imm) __arm_vsliq_n_u32(__a, __b, __imm)
#define vshlq_m_r_u32(__a, __b, __p) __arm_vshlq_m_r_u32(__a, __b, __p)
#define vrshlq_m_n_u32(__a, __b, __p) __arm_vrshlq_m_n_u32(__a, __b, __p)
#define vqshlq_m_r_u32(__a, __b, __p) __arm_vqshlq_m_r_u32(__a, __b, __p)
#define vqrshlq_m_n_u32(__a, __b, __p) __arm_vqrshlq_m_n_u32(__a, __b, __p)
#define vminavq_p_s32(__a, __b, __p) __arm_vminavq_p_s32(__a, __b, __p)
#define vminaq_m_s32(__a, __b, __p) __arm_vminaq_m_s32(__a, __b, __p)
#define vmaxavq_p_s32(__a, __b, __p) __arm_vmaxavq_p_s32(__a, __b, __p)
#define vmaxaq_m_s32(__a, __b, __p) __arm_vmaxaq_m_s32(__a, __b, __p)
#define vcmpneq_m_s32(__a, __b, __p) __arm_vcmpneq_m_s32(__a, __b, __p)
#define vcmpneq_m_n_s32(__a, __b, __p) __arm_vcmpneq_m_n_s32(__a, __b, __p)
#define vcmpltq_m_s32(__a, __b, __p) __arm_vcmpltq_m_s32(__a, __b, __p)
#define vcmpltq_m_n_s32(__a, __b, __p) __arm_vcmpltq_m_n_s32(__a, __b, __p)
#define vcmpleq_m_s32(__a, __b, __p) __arm_vcmpleq_m_s32(__a, __b, __p)
#define vcmpleq_m_n_s32(__a, __b, __p) __arm_vcmpleq_m_n_s32(__a, __b, __p)
#define vcmpgtq_m_s32(__a, __b, __p) __arm_vcmpgtq_m_s32(__a, __b, __p)
#define vcmpgtq_m_n_s32(__a, __b, __p) __arm_vcmpgtq_m_n_s32(__a, __b, __p)
#define vcmpgeq_m_s32(__a, __b, __p) __arm_vcmpgeq_m_s32(__a, __b, __p)
1443 #define vcmpgeq_m_n_s32(__a, __b, __p) __arm_vcmpgeq_m_n_s32(__a, __b, __p)
1444 #define vcmpeqq_m_s32(__a, __b, __p) __arm_vcmpeqq_m_s32(__a, __b, __p)
1445 #define vcmpeqq_m_n_s32(__a, __b, __p) __arm_vcmpeqq_m_n_s32(__a, __b, __p)
1446 #define vshlq_m_r_s32(__a, __b, __p) __arm_vshlq_m_r_s32(__a, __b, __p)
1447 #define vrshlq_m_n_s32(__a, __b, __p) __arm_vrshlq_m_n_s32(__a, __b, __p)
1448 #define vrev64q_m_s32(__inactive, __a, __p) __arm_vrev64q_m_s32(__inactive, __a, __p)
1449 #define vqshlq_m_r_s32(__a, __b, __p) __arm_vqshlq_m_r_s32(__a, __b, __p)
1450 #define vqrshlq_m_n_s32(__a, __b, __p) __arm_vqrshlq_m_n_s32(__a, __b, __p)
1451 #define vqnegq_m_s32(__inactive, __a, __p) __arm_vqnegq_m_s32(__inactive, __a, __p)
1452 #define vqabsq_m_s32(__inactive, __a, __p) __arm_vqabsq_m_s32(__inactive, __a, __p)
1453 #define vnegq_m_s32(__inactive, __a, __p) __arm_vnegq_m_s32(__inactive, __a, __p)
1454 #define vmvnq_m_s32(__inactive, __a, __p) __arm_vmvnq_m_s32(__inactive, __a, __p)
1455 #define vmlsdavxq_p_s32(__a, __b, __p) __arm_vmlsdavxq_p_s32(__a, __b, __p)
1456 #define vmlsdavq_p_s32(__a, __b, __p) __arm_vmlsdavq_p_s32(__a, __b, __p)
1457 #define vmladavxq_p_s32(__a, __b, __p) __arm_vmladavxq_p_s32(__a, __b, __p)
1458 #define vmladavq_p_s32(__a, __b, __p) __arm_vmladavq_p_s32(__a, __b, __p)
1459 #define vminvq_p_s32(__a, __b, __p) __arm_vminvq_p_s32(__a, __b, __p)
1460 #define vmaxvq_p_s32(__a, __b, __p) __arm_vmaxvq_p_s32(__a, __b, __p)
1461 #define vdupq_m_n_s32(__inactive, __a, __p) __arm_vdupq_m_n_s32(__inactive, __a, __p)
1462 #define vclzq_m_s32(__inactive, __a, __p) __arm_vclzq_m_s32(__inactive, __a, __p)
1463 #define vclsq_m_s32(__inactive, __a, __p) __arm_vclsq_m_s32(__inactive, __a, __p)
1464 #define vaddvaq_p_s32(__a, __b, __p) __arm_vaddvaq_p_s32(__a, __b, __p)
1465 #define vabsq_m_s32(__inactive, __a, __p) __arm_vabsq_m_s32(__inactive, __a, __p)
1466 #define vqrdmlsdhxq_s32(__inactive, __a, __b) __arm_vqrdmlsdhxq_s32(__inactive, __a, __b)
1467 #define vqrdmlsdhq_s32(__inactive, __a, __b) __arm_vqrdmlsdhq_s32(__inactive, __a, __b)
1468 #define vqrdmlashq_n_s32(__a, __b, __c) __arm_vqrdmlashq_n_s32(__a, __b, __c)
1469 #define vqrdmlahq_n_s32(__a, __b, __c) __arm_vqrdmlahq_n_s32(__a, __b, __c)
1470 #define vqrdmladhxq_s32(__inactive, __a, __b) __arm_vqrdmladhxq_s32(__inactive, __a, __b)
1471 #define vqrdmladhq_s32(__inactive, __a, __b) __arm_vqrdmladhq_s32(__inactive, __a, __b)
1472 #define vqdmlsdhxq_s32(__inactive, __a, __b) __arm_vqdmlsdhxq_s32(__inactive, __a, __b)
1473 #define vqdmlsdhq_s32(__inactive, __a, __b) __arm_vqdmlsdhq_s32(__inactive, __a, __b)
1474 #define vqdmlashq_n_s32(__a, __b, __c) __arm_vqdmlashq_n_s32(__a, __b, __c)
1475 #define vqdmlahq_n_s32(__a, __b, __c) __arm_vqdmlahq_n_s32(__a, __b, __c)
1476 #define vqdmladhxq_s32(__inactive, __a, __b) __arm_vqdmladhxq_s32(__inactive, __a, __b)
1477 #define vqdmladhq_s32(__inactive, __a, __b) __arm_vqdmladhq_s32(__inactive, __a, __b)
1478 #define vmlsdavaxq_s32(__a, __b, __c) __arm_vmlsdavaxq_s32(__a, __b, __c)
1479 #define vmlsdavaq_s32(__a, __b, __c) __arm_vmlsdavaq_s32(__a, __b, __c)
1480 #define vmlasq_n_s32(__a, __b, __c) __arm_vmlasq_n_s32(__a, __b, __c)
1481 #define vmlaq_n_s32(__a, __b, __c) __arm_vmlaq_n_s32(__a, __b, __c)
1482 #define vmladavaxq_s32(__a, __b, __c) __arm_vmladavaxq_s32(__a, __b, __c)
1483 #define vmladavaq_s32(__a, __b, __c) __arm_vmladavaq_s32(__a, __b, __c)
1484 #define vsriq_n_s32(__a, __b, __imm) __arm_vsriq_n_s32(__a, __b, __imm)
1485 #define vsliq_n_s32(__a, __b, __imm) __arm_vsliq_n_s32(__a, __b, __imm)
1486 #define vpselq_u64(__a, __b, __p) __arm_vpselq_u64(__a, __b, __p)
1487 #define vpselq_s64(__a, __b, __p) __arm_vpselq_s64(__a, __b, __p)
1488 #define vrmlaldavhaxq_s32(__a, __b, __c) __arm_vrmlaldavhaxq_s32(__a, __b, __c)
1489 #define vrmlsldavhaq_s32(__a, __b, __c) __arm_vrmlsldavhaq_s32(__a, __b, __c)
1490 #define vrmlsldavhaxq_s32(__a, __b, __c) __arm_vrmlsldavhaxq_s32(__a, __b, __c)
1491 #define vaddlvaq_p_s32(__a, __b, __p) __arm_vaddlvaq_p_s32(__a, __b, __p)
1492 #define vcvtbq_m_f16_f32(__a, __b, __p) __arm_vcvtbq_m_f16_f32(__a, __b, __p)
1493 #define vcvtbq_m_f32_f16(__inactive, __a, __p) __arm_vcvtbq_m_f32_f16(__inactive, __a, __p)
1494 #define vcvttq_m_f16_f32(__a, __b, __p) __arm_vcvttq_m_f16_f32(__a, __b, __p)
1495 #define vcvttq_m_f32_f16(__inactive, __a, __p) __arm_vcvttq_m_f32_f16(__inactive, __a, __p)
1496 #define vrev16q_m_s8(__inactive, __a, __p) __arm_vrev16q_m_s8(__inactive, __a, __p)
1497 #define vrev32q_m_f16(__inactive, __a, __p) __arm_vrev32q_m_f16(__inactive, __a, __p)
1498 #define vrmlaldavhq_p_s32(__a, __b, __p) __arm_vrmlaldavhq_p_s32(__a, __b, __p)
1499 #define vrmlaldavhxq_p_s32(__a, __b, __p) __arm_vrmlaldavhxq_p_s32(__a, __b, __p)
1500 #define vrmlsldavhq_p_s32(__a, __b, __p) __arm_vrmlsldavhq_p_s32(__a, __b, __p)
1501 #define vrmlsldavhxq_p_s32(__a, __b, __p) __arm_vrmlsldavhxq_p_s32(__a, __b, __p)
1502 #define vaddlvaq_p_u32(__a, __b, __p) __arm_vaddlvaq_p_u32(__a, __b, __p)
1503 #define vrev16q_m_u8(__inactive, __a, __p) __arm_vrev16q_m_u8(__inactive, __a, __p)
1504 #define vrmlaldavhq_p_u32(__a, __b, __p) __arm_vrmlaldavhq_p_u32(__a, __b, __p)
1505 #define vmvnq_m_n_s16(__inactive, __imm, __p) __arm_vmvnq_m_n_s16(__inactive, __imm, __p)
1506 #define vorrq_m_n_s16(__a, __imm, __p) __arm_vorrq_m_n_s16(__a, __imm, __p)
1507 #define vqrshrntq_n_s16(__a, __b, __imm) __arm_vqrshrntq_n_s16(__a, __b, __imm)
1508 #define vqshrnbq_n_s16(__a, __b, __imm) __arm_vqshrnbq_n_s16(__a, __b, __imm)
1509 #define vqshrntq_n_s16(__a, __b, __imm) __arm_vqshrntq_n_s16(__a, __b, __imm)
1510 #define vrshrnbq_n_s16(__a, __b, __imm) __arm_vrshrnbq_n_s16(__a, __b, __imm)
1511 #define vrshrntq_n_s16(__a, __b, __imm) __arm_vrshrntq_n_s16(__a, __b, __imm)
1512 #define vshrnbq_n_s16(__a, __b, __imm) __arm_vshrnbq_n_s16(__a, __b, __imm)
1513 #define vshrntq_n_s16(__a, __b, __imm) __arm_vshrntq_n_s16(__a, __b, __imm)
1514 #define vcmlaq_f16(__a, __b, __c) __arm_vcmlaq_f16(__a, __b, __c)
1515 #define vcmlaq_rot180_f16(__a, __b, __c) __arm_vcmlaq_rot180_f16(__a, __b, __c)
1516 #define vcmlaq_rot270_f16(__a, __b, __c) __arm_vcmlaq_rot270_f16(__a, __b, __c)
1517 #define vcmlaq_rot90_f16(__a, __b, __c) __arm_vcmlaq_rot90_f16(__a, __b, __c)
1518 #define vfmaq_f16(__a, __b, __c) __arm_vfmaq_f16(__a, __b, __c)
1519 #define vfmaq_n_f16(__a, __b, __c) __arm_vfmaq_n_f16(__a, __b, __c)
1520 #define vfmasq_n_f16(__a, __b, __c) __arm_vfmasq_n_f16(__a, __b, __c)
1521 #define vfmsq_f16(__a, __b, __c) __arm_vfmsq_f16(__a, __b, __c)
1522 #define vmlaldavaq_s16(__a, __b, __c) __arm_vmlaldavaq_s16(__a, __b, __c)
1523 #define vmlaldavaxq_s16(__a, __b, __c) __arm_vmlaldavaxq_s16(__a, __b, __c)
1524 #define vmlsldavaq_s16(__a, __b, __c) __arm_vmlsldavaq_s16(__a, __b, __c)
1525 #define vmlsldavaxq_s16(__a, __b, __c) __arm_vmlsldavaxq_s16(__a, __b, __c)
1526 #define vabsq_m_f16(__inactive, __a, __p) __arm_vabsq_m_f16(__inactive, __a, __p)
1527 #define vcvtmq_m_s16_f16(__inactive, __a, __p) __arm_vcvtmq_m_s16_f16(__inactive, __a, __p)
1528 #define vcvtnq_m_s16_f16(__inactive, __a, __p) __arm_vcvtnq_m_s16_f16(__inactive, __a, __p)
1529 #define vcvtpq_m_s16_f16(__inactive, __a, __p) __arm_vcvtpq_m_s16_f16(__inactive, __a, __p)
1530 #define vcvtq_m_s16_f16(__inactive, __a, __p) __arm_vcvtq_m_s16_f16(__inactive, __a, __p)
1531 #define vdupq_m_n_f16(__inactive, __a, __p) __arm_vdupq_m_n_f16(__inactive, __a, __p)
1532 #define vmaxnmaq_m_f16(__a, __b, __p) __arm_vmaxnmaq_m_f16(__a, __b, __p)
1533 #define vmaxnmavq_p_f16(__a, __b, __p) __arm_vmaxnmavq_p_f16(__a, __b, __p)
1534 #define vmaxnmvq_p_f16(__a, __b, __p) __arm_vmaxnmvq_p_f16(__a, __b, __p)
1535 #define vminnmaq_m_f16(__a, __b, __p) __arm_vminnmaq_m_f16(__a, __b, __p)
1536 #define vminnmavq_p_f16(__a, __b, __p) __arm_vminnmavq_p_f16(__a, __b, __p)
1537 #define vminnmvq_p_f16(__a, __b, __p) __arm_vminnmvq_p_f16(__a, __b, __p)
1538 #define vmlaldavq_p_s16(__a, __b, __p) __arm_vmlaldavq_p_s16(__a, __b, __p)
1539 #define vmlaldavxq_p_s16(__a, __b, __p) __arm_vmlaldavxq_p_s16(__a, __b, __p)
1540 #define vmlsldavq_p_s16(__a, __b, __p) __arm_vmlsldavq_p_s16(__a, __b, __p)
1541 #define vmlsldavxq_p_s16(__a, __b, __p) __arm_vmlsldavxq_p_s16(__a, __b, __p)
1542 #define vmovlbq_m_s8(__inactive, __a, __p) __arm_vmovlbq_m_s8(__inactive, __a, __p)
1543 #define vmovltq_m_s8(__inactive, __a, __p) __arm_vmovltq_m_s8(__inactive, __a, __p)
1544 #define vmovnbq_m_s16(__a, __b, __p) __arm_vmovnbq_m_s16(__a, __b, __p)
1545 #define vmovntq_m_s16(__a, __b, __p) __arm_vmovntq_m_s16(__a, __b, __p)
1546 #define vnegq_m_f16(__inactive, __a, __p) __arm_vnegq_m_f16(__inactive, __a, __p)
1547 #define vpselq_f16(__a, __b, __p) __arm_vpselq_f16(__a, __b, __p)
1548 #define vqmovnbq_m_s16(__a, __b, __p) __arm_vqmovnbq_m_s16(__a, __b, __p)
1549 #define vqmovntq_m_s16(__a, __b, __p) __arm_vqmovntq_m_s16(__a, __b, __p)
1550 #define vrev32q_m_s8(__inactive, __a, __p) __arm_vrev32q_m_s8(__inactive, __a, __p)
1551 #define vrev64q_m_f16(__inactive, __a, __p) __arm_vrev64q_m_f16(__inactive, __a, __p)
1552 #define vrndaq_m_f16(__inactive, __a, __p) __arm_vrndaq_m_f16(__inactive, __a, __p)
1553 #define vrndmq_m_f16(__inactive, __a, __p) __arm_vrndmq_m_f16(__inactive, __a, __p)
1554 #define vrndnq_m_f16(__inactive, __a, __p) __arm_vrndnq_m_f16(__inactive, __a, __p)
1555 #define vrndpq_m_f16(__inactive, __a, __p) __arm_vrndpq_m_f16(__inactive, __a, __p)
1556 #define vrndq_m_f16(__inactive, __a, __p) __arm_vrndq_m_f16(__inactive, __a, __p)
1557 #define vrndxq_m_f16(__inactive, __a, __p) __arm_vrndxq_m_f16(__inactive, __a, __p)
1558 #define vcmpeqq_m_n_f16(__a, __b, __p) __arm_vcmpeqq_m_n_f16(__a, __b, __p)
1559 #define vcmpgeq_m_f16(__a, __b, __p) __arm_vcmpgeq_m_f16(__a, __b, __p)
1560 #define vcmpgeq_m_n_f16(__a, __b, __p) __arm_vcmpgeq_m_n_f16(__a, __b, __p)
1561 #define vcmpgtq_m_f16(__a, __b, __p) __arm_vcmpgtq_m_f16(__a, __b, __p)
1562 #define vcmpgtq_m_n_f16(__a, __b, __p) __arm_vcmpgtq_m_n_f16(__a, __b, __p)
1563 #define vcmpleq_m_f16(__a, __b, __p) __arm_vcmpleq_m_f16(__a, __b, __p)
1564 #define vcmpleq_m_n_f16(__a, __b, __p) __arm_vcmpleq_m_n_f16(__a, __b, __p)
1565 #define vcmpltq_m_f16(__a, __b, __p) __arm_vcmpltq_m_f16(__a, __b, __p)
1566 #define vcmpltq_m_n_f16(__a, __b, __p) __arm_vcmpltq_m_n_f16(__a, __b, __p)
1567 #define vcmpneq_m_f16(__a, __b, __p) __arm_vcmpneq_m_f16(__a, __b, __p)
1568 #define vcmpneq_m_n_f16(__a, __b, __p) __arm_vcmpneq_m_n_f16(__a, __b, __p)
1569 #define vmvnq_m_n_u16(__inactive, __imm, __p) __arm_vmvnq_m_n_u16(__inactive, __imm, __p)
1570 #define vorrq_m_n_u16(__a, __imm, __p) __arm_vorrq_m_n_u16(__a, __imm, __p)
1571 #define vqrshruntq_n_s16(__a, __b, __imm) __arm_vqrshruntq_n_s16(__a, __b, __imm)
1572 #define vqshrunbq_n_s16(__a, __b, __imm) __arm_vqshrunbq_n_s16(__a, __b, __imm)
1573 #define vqshruntq_n_s16(__a, __b, __imm) __arm_vqshruntq_n_s16(__a, __b, __imm)
1574 #define vcvtmq_m_u16_f16(__inactive, __a, __p) __arm_vcvtmq_m_u16_f16(__inactive, __a, __p)
1575 #define vcvtnq_m_u16_f16(__inactive, __a, __p) __arm_vcvtnq_m_u16_f16(__inactive, __a, __p)
1576 #define vcvtpq_m_u16_f16(__inactive, __a, __p) __arm_vcvtpq_m_u16_f16(__inactive, __a, __p)
1577 #define vcvtq_m_u16_f16(__inactive, __a, __p) __arm_vcvtq_m_u16_f16(__inactive, __a, __p)
1578 #define vqmovunbq_m_s16(__a, __b, __p) __arm_vqmovunbq_m_s16(__a, __b, __p)
1579 #define vqmovuntq_m_s16(__a, __b, __p) __arm_vqmovuntq_m_s16(__a, __b, __p)
1580 #define vqrshrntq_n_u16(__a, __b, __imm) __arm_vqrshrntq_n_u16(__a, __b, __imm)
1581 #define vqshrnbq_n_u16(__a, __b, __imm) __arm_vqshrnbq_n_u16(__a, __b, __imm)
1582 #define vqshrntq_n_u16(__a, __b, __imm) __arm_vqshrntq_n_u16(__a, __b, __imm)
1583 #define vrshrnbq_n_u16(__a, __b, __imm) __arm_vrshrnbq_n_u16(__a, __b, __imm)
1584 #define vrshrntq_n_u16(__a, __b, __imm) __arm_vrshrntq_n_u16(__a, __b, __imm)
1585 #define vshrnbq_n_u16(__a, __b, __imm) __arm_vshrnbq_n_u16(__a, __b, __imm)
1586 #define vshrntq_n_u16(__a, __b, __imm) __arm_vshrntq_n_u16(__a, __b, __imm)
1587 #define vmlaldavaq_u16(__a, __b, __c) __arm_vmlaldavaq_u16(__a, __b, __c)
1588 #define vmlaldavq_p_u16(__a, __b, __p) __arm_vmlaldavq_p_u16(__a, __b, __p)
1589 #define vmovlbq_m_u8(__inactive, __a, __p) __arm_vmovlbq_m_u8(__inactive, __a, __p)
1590 #define vmovltq_m_u8(__inactive, __a, __p) __arm_vmovltq_m_u8(__inactive, __a, __p)
1591 #define vmovnbq_m_u16(__a, __b, __p) __arm_vmovnbq_m_u16(__a, __b, __p)
1592 #define vmovntq_m_u16(__a, __b, __p) __arm_vmovntq_m_u16(__a, __b, __p)
1593 #define vqmovnbq_m_u16(__a, __b, __p) __arm_vqmovnbq_m_u16(__a, __b, __p)
1594 #define vqmovntq_m_u16(__a, __b, __p) __arm_vqmovntq_m_u16(__a, __b, __p)
1595 #define vrev32q_m_u8(__inactive, __a, __p) __arm_vrev32q_m_u8(__inactive, __a, __p)
1596 #define vmvnq_m_n_s32(__inactive, __imm, __p) __arm_vmvnq_m_n_s32(__inactive, __imm, __p)
1597 #define vorrq_m_n_s32(__a, __imm, __p) __arm_vorrq_m_n_s32(__a, __imm, __p)
1598 #define vqrshrntq_n_s32(__a, __b, __imm) __arm_vqrshrntq_n_s32(__a, __b, __imm)
1599 #define vqshrnbq_n_s32(__a, __b, __imm) __arm_vqshrnbq_n_s32(__a, __b, __imm)
1600 #define vqshrntq_n_s32(__a, __b, __imm) __arm_vqshrntq_n_s32(__a, __b, __imm)
1601 #define vrshrnbq_n_s32(__a, __b, __imm) __arm_vrshrnbq_n_s32(__a, __b, __imm)
1602 #define vrshrntq_n_s32(__a, __b, __imm) __arm_vrshrntq_n_s32(__a, __b, __imm)
1603 #define vshrnbq_n_s32(__a, __b, __imm) __arm_vshrnbq_n_s32(__a, __b, __imm)
1604 #define vshrntq_n_s32(__a, __b, __imm) __arm_vshrntq_n_s32(__a, __b, __imm)
1605 #define vcmlaq_f32(__a, __b, __c) __arm_vcmlaq_f32(__a, __b, __c)
1606 #define vcmlaq_rot180_f32(__a, __b, __c) __arm_vcmlaq_rot180_f32(__a, __b, __c)
1607 #define vcmlaq_rot270_f32(__a, __b, __c) __arm_vcmlaq_rot270_f32(__a, __b, __c)
1608 #define vcmlaq_rot90_f32(__a, __b, __c) __arm_vcmlaq_rot90_f32(__a, __b, __c)
1609 #define vfmaq_f32(__a, __b, __c) __arm_vfmaq_f32(__a, __b, __c)
1610 #define vfmaq_n_f32(__a, __b, __c) __arm_vfmaq_n_f32(__a, __b, __c)
1611 #define vfmasq_n_f32(__a, __b, __c) __arm_vfmasq_n_f32(__a, __b, __c)
1612 #define vfmsq_f32(__a, __b, __c) __arm_vfmsq_f32(__a, __b, __c)
1613 #define vmlaldavaq_s32(__a, __b, __c) __arm_vmlaldavaq_s32(__a, __b, __c)
1614 #define vmlaldavaxq_s32(__a, __b, __c) __arm_vmlaldavaxq_s32(__a, __b, __c)
1615 #define vmlsldavaq_s32(__a, __b, __c) __arm_vmlsldavaq_s32(__a, __b, __c)
1616 #define vmlsldavaxq_s32(__a, __b, __c) __arm_vmlsldavaxq_s32(__a, __b, __c)
1617 #define vabsq_m_f32(__inactive, __a, __p) __arm_vabsq_m_f32(__inactive, __a, __p)
1618 #define vcvtmq_m_s32_f32(__inactive, __a, __p) __arm_vcvtmq_m_s32_f32(__inactive, __a, __p)
1619 #define vcvtnq_m_s32_f32(__inactive, __a, __p) __arm_vcvtnq_m_s32_f32(__inactive, __a, __p)
1620 #define vcvtpq_m_s32_f32(__inactive, __a, __p) __arm_vcvtpq_m_s32_f32(__inactive, __a, __p)
1621 #define vcvtq_m_s32_f32(__inactive, __a, __p) __arm_vcvtq_m_s32_f32(__inactive, __a, __p)
1622 #define vdupq_m_n_f32(__inactive, __a, __p) __arm_vdupq_m_n_f32(__inactive, __a, __p)
1623 #define vmaxnmaq_m_f32(__a, __b, __p) __arm_vmaxnmaq_m_f32(__a, __b, __p)
1624 #define vmaxnmavq_p_f32(__a, __b, __p) __arm_vmaxnmavq_p_f32(__a, __b, __p)
1625 #define vmaxnmvq_p_f32(__a, __b, __p) __arm_vmaxnmvq_p_f32(__a, __b, __p)
1626 #define vminnmaq_m_f32(__a, __b, __p) __arm_vminnmaq_m_f32(__a, __b, __p)
1627 #define vminnmavq_p_f32(__a, __b, __p) __arm_vminnmavq_p_f32(__a, __b, __p)
1628 #define vminnmvq_p_f32(__a, __b, __p) __arm_vminnmvq_p_f32(__a, __b, __p)
1629 #define vmlaldavq_p_s32(__a, __b, __p) __arm_vmlaldavq_p_s32(__a, __b, __p)
1630 #define vmlaldavxq_p_s32(__a, __b, __p) __arm_vmlaldavxq_p_s32(__a, __b, __p)
1631 #define vmlsldavq_p_s32(__a, __b, __p) __arm_vmlsldavq_p_s32(__a, __b, __p)
1632 #define vmlsldavxq_p_s32(__a, __b, __p) __arm_vmlsldavxq_p_s32(__a, __b, __p)
1633 #define vmovlbq_m_s16(__inactive, __a, __p) __arm_vmovlbq_m_s16(__inactive, __a, __p)
1634 #define vmovltq_m_s16(__inactive, __a, __p) __arm_vmovltq_m_s16(__inactive, __a, __p)
1635 #define vmovnbq_m_s32(__a, __b, __p) __arm_vmovnbq_m_s32(__a, __b, __p)
1636 #define vmovntq_m_s32(__a, __b, __p) __arm_vmovntq_m_s32(__a, __b, __p)
1637 #define vnegq_m_f32(__inactive, __a, __p) __arm_vnegq_m_f32(__inactive, __a, __p)
1638 #define vpselq_f32(__a, __b, __p) __arm_vpselq_f32(__a, __b, __p)
1639 #define vqmovnbq_m_s32(__a, __b, __p) __arm_vqmovnbq_m_s32(__a, __b, __p)
1640 #define vqmovntq_m_s32(__a, __b, __p) __arm_vqmovntq_m_s32(__a, __b, __p)
1641 #define vrev32q_m_s16(__inactive, __a, __p) __arm_vrev32q_m_s16(__inactive, __a, __p)
1642 #define vrev64q_m_f32(__inactive, __a, __p) __arm_vrev64q_m_f32(__inactive, __a, __p)
1643 #define vrndaq_m_f32(__inactive, __a, __p) __arm_vrndaq_m_f32(__inactive, __a, __p)
1644 #define vrndmq_m_f32(__inactive, __a, __p) __arm_vrndmq_m_f32(__inactive, __a, __p)
1645 #define vrndnq_m_f32(__inactive, __a, __p) __arm_vrndnq_m_f32(__inactive, __a, __p)
1646 #define vrndpq_m_f32(__inactive, __a, __p) __arm_vrndpq_m_f32(__inactive, __a, __p)
1647 #define vrndq_m_f32(__inactive, __a, __p) __arm_vrndq_m_f32(__inactive, __a, __p)
1648 #define vrndxq_m_f32(__inactive, __a, __p) __arm_vrndxq_m_f32(__inactive, __a, __p)
1649 #define vcmpeqq_m_n_f32(__a, __b, __p) __arm_vcmpeqq_m_n_f32(__a, __b, __p)
1650 #define vcmpgeq_m_f32(__a, __b, __p) __arm_vcmpgeq_m_f32(__a, __b, __p)
1651 #define vcmpgeq_m_n_f32(__a, __b, __p) __arm_vcmpgeq_m_n_f32(__a, __b, __p)
1652 #define vcmpgtq_m_f32(__a, __b, __p) __arm_vcmpgtq_m_f32(__a, __b, __p)
1653 #define vcmpgtq_m_n_f32(__a, __b, __p) __arm_vcmpgtq_m_n_f32(__a, __b, __p)
1654 #define vcmpleq_m_f32(__a, __b, __p) __arm_vcmpleq_m_f32(__a, __b, __p)
1655 #define vcmpleq_m_n_f32(__a, __b, __p) __arm_vcmpleq_m_n_f32(__a, __b, __p)
1656 #define vcmpltq_m_f32(__a, __b, __p) __arm_vcmpltq_m_f32(__a, __b, __p)
1657 #define vcmpltq_m_n_f32(__a, __b, __p) __arm_vcmpltq_m_n_f32(__a, __b, __p)
1658 #define vcmpneq_m_f32(__a, __b, __p) __arm_vcmpneq_m_f32(__a, __b, __p)
1659 #define vcmpneq_m_n_f32(__a, __b, __p) __arm_vcmpneq_m_n_f32(__a, __b, __p)
1660 #define vmvnq_m_n_u32(__inactive, __imm, __p) __arm_vmvnq_m_n_u32(__inactive, __imm, __p)
1661 #define vorrq_m_n_u32(__a, __imm, __p) __arm_vorrq_m_n_u32(__a, __imm, __p)
1662 #define vqrshruntq_n_s32(__a, __b, __imm) __arm_vqrshruntq_n_s32(__a, __b, __imm)
1663 #define vqshrunbq_n_s32(__a, __b, __imm) __arm_vqshrunbq_n_s32(__a, __b, __imm)
1664 #define vqshruntq_n_s32(__a, __b, __imm) __arm_vqshruntq_n_s32(__a, __b, __imm)
1665 #define vcvtmq_m_u32_f32(__inactive, __a, __p) __arm_vcvtmq_m_u32_f32(__inactive, __a, __p)
1666 #define vcvtnq_m_u32_f32(__inactive, __a, __p) __arm_vcvtnq_m_u32_f32(__inactive, __a, __p)
1667 #define vcvtpq_m_u32_f32(__inactive, __a, __p) __arm_vcvtpq_m_u32_f32(__inactive, __a, __p)
1668 #define vcvtq_m_u32_f32(__inactive, __a, __p) __arm_vcvtq_m_u32_f32(__inactive, __a, __p)
1669 #define vqmovunbq_m_s32(__a, __b, __p) __arm_vqmovunbq_m_s32(__a, __b, __p)
1670 #define vqmovuntq_m_s32(__a, __b, __p) __arm_vqmovuntq_m_s32(__a, __b, __p)
1671 #define vqrshrntq_n_u32(__a, __b, __imm) __arm_vqrshrntq_n_u32(__a, __b, __imm)
1672 #define vqshrnbq_n_u32(__a, __b, __imm) __arm_vqshrnbq_n_u32(__a, __b, __imm)
1673 #define vqshrntq_n_u32(__a, __b, __imm) __arm_vqshrntq_n_u32(__a, __b, __imm)
1674 #define vrshrnbq_n_u32(__a, __b, __imm) __arm_vrshrnbq_n_u32(__a, __b, __imm)
1675 #define vrshrntq_n_u32(__a, __b, __imm) __arm_vrshrntq_n_u32(__a, __b, __imm)
1676 #define vshrnbq_n_u32(__a, __b, __imm) __arm_vshrnbq_n_u32(__a, __b, __imm)
1677 #define vshrntq_n_u32(__a, __b, __imm) __arm_vshrntq_n_u32(__a, __b, __imm)
1678 #define vmlaldavaq_u32(__a, __b, __c) __arm_vmlaldavaq_u32(__a, __b, __c)
1679 #define vmlaldavq_p_u32(__a, __b, __p) __arm_vmlaldavq_p_u32(__a, __b, __p)
1680 #define vmovlbq_m_u16(__inactive, __a, __p) __arm_vmovlbq_m_u16(__inactive, __a, __p)
1681 #define vmovltq_m_u16(__inactive, __a, __p) __arm_vmovltq_m_u16(__inactive, __a, __p)
1682 #define vmovnbq_m_u32(__a, __b, __p) __arm_vmovnbq_m_u32(__a, __b, __p)
1683 #define vmovntq_m_u32(__a, __b, __p) __arm_vmovntq_m_u32(__a, __b, __p)
1684 #define vqmovnbq_m_u32(__a, __b, __p) __arm_vqmovnbq_m_u32(__a, __b, __p)
1685 #define vqmovntq_m_u32(__a, __b, __p) __arm_vqmovntq_m_u32(__a, __b, __p)
1686 #define vrev32q_m_u16(__inactive, __a, __p) __arm_vrev32q_m_u16(__inactive, __a, __p)
1687 #define vsriq_m_n_s8(__a, __b, __imm, __p) __arm_vsriq_m_n_s8(__a, __b, __imm, __p)
1688 #define vsubq_m_s8(__inactive, __a, __b, __p) __arm_vsubq_m_s8(__inactive, __a, __b, __p)
1689 #define vcvtq_m_n_f16_u16(__inactive, __a, __imm6, __p) __arm_vcvtq_m_n_f16_u16(__inactive, __a, __imm6, __p)
1690 #define vqshluq_m_n_s8(__inactive, __a, __imm, __p) __arm_vqshluq_m_n_s8(__inactive, __a, __imm, __p)
1691 #define vabavq_p_s8(__a, __b, __c, __p) __arm_vabavq_p_s8(__a, __b, __c, __p)
1692 #define vsriq_m_n_u8(__a, __b, __imm, __p) __arm_vsriq_m_n_u8(__a, __b, __imm, __p)
1693 #define vshlq_m_u8(__inactive, __a, __b, __p) __arm_vshlq_m_u8(__inactive, __a, __b, __p)
1694 #define vsubq_m_u8(__inactive, __a, __b, __p) __arm_vsubq_m_u8(__inactive, __a, __b, __p)
1695 #define vabavq_p_u8(__a, __b, __c, __p) __arm_vabavq_p_u8(__a, __b, __c, __p)
1696 #define vshlq_m_s8(__inactive, __a, __b, __p) __arm_vshlq_m_s8(__inactive, __a, __b, __p)
1697 #define vcvtq_m_n_f16_s16(__inactive, __a, __imm6, __p) __arm_vcvtq_m_n_f16_s16(__inactive, __a, __imm6, __p)
1698 #define vsriq_m_n_s16(__a, __b, __imm, __p) __arm_vsriq_m_n_s16(__a, __b, __imm, __p)
1699 #define vsubq_m_s16(__inactive, __a, __b, __p) __arm_vsubq_m_s16(__inactive, __a, __b, __p)
1700 #define vcvtq_m_n_f32_u32(__inactive, __a, __imm6, __p) __arm_vcvtq_m_n_f32_u32(__inactive, __a, __imm6, __p)
1701 #define vqshluq_m_n_s16(__inactive, __a, __imm, __p) __arm_vqshluq_m_n_s16(__inactive, __a, __imm, __p)
1702 #define vabavq_p_s16(__a, __b, __c, __p) __arm_vabavq_p_s16(__a, __b, __c, __p)
1703 #define vsriq_m_n_u16(__a, __b, __imm, __p) __arm_vsriq_m_n_u16(__a, __b, __imm, __p)
1704 #define vshlq_m_u16(__inactive, __a, __b, __p) __arm_vshlq_m_u16(__inactive, __a, __b, __p)
1705 #define vsubq_m_u16(__inactive, __a, __b, __p) __arm_vsubq_m_u16(__inactive, __a, __b, __p)
1706 #define vabavq_p_u16(__a, __b, __c, __p) __arm_vabavq_p_u16(__a, __b, __c, __p)
1707 #define vshlq_m_s16(__inactive, __a, __b, __p) __arm_vshlq_m_s16(__inactive, __a, __b, __p)
1708 #define vcvtq_m_n_f32_s32(__inactive, __a, __imm6, __p) __arm_vcvtq_m_n_f32_s32(__inactive, __a, __imm6, __p)
1709 #define vsriq_m_n_s32(__a, __b, __imm, __p) __arm_vsriq_m_n_s32(__a, __b, __imm, __p)
1710 #define vsubq_m_s32(__inactive, __a, __b, __p) __arm_vsubq_m_s32(__inactive, __a, __b, __p)
1711 #define vqshluq_m_n_s32(__inactive, __a, __imm, __p) __arm_vqshluq_m_n_s32(__inactive, __a, __imm, __p)
1712 #define vabavq_p_s32(__a, __b, __c, __p) __arm_vabavq_p_s32(__a, __b, __c, __p)
1713 #define vsriq_m_n_u32(__a, __b, __imm, __p) __arm_vsriq_m_n_u32(__a, __b, __imm, __p)
1714 #define vshlq_m_u32(__inactive, __a, __b, __p) __arm_vshlq_m_u32(__inactive, __a, __b, __p)
1715 #define vsubq_m_u32(__inactive, __a, __b, __p) __arm_vsubq_m_u32(__inactive, __a, __b, __p)
1716 #define vabavq_p_u32(__a, __b, __c, __p) __arm_vabavq_p_u32(__a, __b, __c, __p)
1717 #define vshlq_m_s32(__inactive, __a, __b, __p) __arm_vshlq_m_s32(__inactive, __a, __b, __p)
1718 #define vabdq_m_s8(__inactive, __a, __b, __p) __arm_vabdq_m_s8(__inactive, __a, __b, __p)
1719 #define vabdq_m_s32(__inactive, __a, __b, __p) __arm_vabdq_m_s32(__inactive, __a, __b, __p)
1720 #define vabdq_m_s16(__inactive, __a, __b, __p) __arm_vabdq_m_s16(__inactive, __a, __b, __p)
1721 #define vabdq_m_u8(__inactive, __a, __b, __p) __arm_vabdq_m_u8(__inactive, __a, __b, __p)
1722 #define vabdq_m_u32(__inactive, __a, __b, __p) __arm_vabdq_m_u32(__inactive, __a, __b, __p)
1723 #define vabdq_m_u16(__inactive, __a, __b, __p) __arm_vabdq_m_u16(__inactive, __a, __b, __p)
1724 #define vaddq_m_n_s8(__inactive, __a, __b, __p) __arm_vaddq_m_n_s8(__inactive, __a, __b, __p)
1725 #define vaddq_m_n_s32(__inactive, __a, __b, __p) __arm_vaddq_m_n_s32(__inactive, __a, __b, __p)
1726 #define vaddq_m_n_s16(__inactive, __a, __b, __p) __arm_vaddq_m_n_s16(__inactive, __a, __b, __p)
1727 #define vaddq_m_n_u8(__inactive, __a, __b, __p) __arm_vaddq_m_n_u8(__inactive, __a, __b, __p)
1728 #define vaddq_m_n_u32(__inactive, __a, __b, __p) __arm_vaddq_m_n_u32(__inactive, __a, __b, __p)
1729 #define vaddq_m_n_u16(__inactive, __a, __b, __p) __arm_vaddq_m_n_u16(__inactive, __a, __b, __p)
1730 #define vaddq_m_s8(__inactive, __a, __b, __p) __arm_vaddq_m_s8(__inactive, __a, __b, __p)
1731 #define vaddq_m_s32(__inactive, __a, __b, __p) __arm_vaddq_m_s32(__inactive, __a, __b, __p)
1732 #define vaddq_m_s16(__inactive, __a, __b, __p) __arm_vaddq_m_s16(__inactive, __a, __b, __p)
1733 #define vaddq_m_u8(__inactive, __a, __b, __p) __arm_vaddq_m_u8(__inactive, __a, __b, __p)
1734 #define vaddq_m_u32(__inactive, __a, __b, __p) __arm_vaddq_m_u32(__inactive, __a, __b, __p)
1735 #define vaddq_m_u16(__inactive, __a, __b, __p) __arm_vaddq_m_u16(__inactive, __a, __b, __p)
1736 #define vandq_m_s8(__inactive, __a, __b, __p) __arm_vandq_m_s8(__inactive, __a, __b, __p)
1737 #define vandq_m_s32(__inactive, __a, __b, __p) __arm_vandq_m_s32(__inactive, __a, __b, __p)
1738 #define vandq_m_s16(__inactive, __a, __b, __p) __arm_vandq_m_s16(__inactive, __a, __b, __p)
1739 #define vandq_m_u8(__inactive, __a, __b, __p) __arm_vandq_m_u8(__inactive, __a, __b, __p)
1740 #define vandq_m_u32(__inactive, __a, __b, __p) __arm_vandq_m_u32(__inactive, __a, __b, __p)
1741 #define vandq_m_u16(__inactive, __a, __b, __p) __arm_vandq_m_u16(__inactive, __a, __b, __p)
1742 #define vbicq_m_s8(__inactive, __a, __b, __p) __arm_vbicq_m_s8(__inactive, __a, __b, __p)
1743 #define vbicq_m_s32(__inactive, __a, __b, __p) __arm_vbicq_m_s32(__inactive, __a, __b, __p)
1744 #define vbicq_m_s16(__inactive, __a, __b, __p) __arm_vbicq_m_s16(__inactive, __a, __b, __p)
1745 #define vbicq_m_u8(__inactive, __a, __b, __p) __arm_vbicq_m_u8(__inactive, __a, __b, __p)
1746 #define vbicq_m_u32(__inactive, __a, __b, __p) __arm_vbicq_m_u32(__inactive, __a, __b, __p)
1747 #define vbicq_m_u16(__inactive, __a, __b, __p) __arm_vbicq_m_u16(__inactive, __a, __b, __p)
1748 #define vbrsrq_m_n_s8(__inactive, __a, __b, __p) __arm_vbrsrq_m_n_s8(__inactive, __a, __b, __p)
1749 #define vbrsrq_m_n_s32(__inactive, __a, __b, __p) __arm_vbrsrq_m_n_s32(__inactive, __a, __b, __p)
1750 #define vbrsrq_m_n_s16(__inactive, __a, __b, __p) __arm_vbrsrq_m_n_s16(__inactive, __a, __b, __p)
1751 #define vbrsrq_m_n_u8(__inactive, __a, __b, __p) __arm_vbrsrq_m_n_u8(__inactive, __a, __b, __p)
1752 #define vbrsrq_m_n_u32(__inactive, __a, __b, __p) __arm_vbrsrq_m_n_u32(__inactive, __a, __b, __p)
1753 #define vbrsrq_m_n_u16(__inactive, __a, __b, __p) __arm_vbrsrq_m_n_u16(__inactive, __a, __b, __p)
1754 #define vcaddq_rot270_m_s8(__inactive, __a, __b, __p) __arm_vcaddq_rot270_m_s8(__inactive, __a, __b, __p)
1755 #define vcaddq_rot270_m_s32(__inactive, __a, __b, __p) __arm_vcaddq_rot270_m_s32(__inactive, __a, __b, __p)
1756 #define vcaddq_rot270_m_s16(__inactive, __a, __b, __p) __arm_vcaddq_rot270_m_s16(__inactive, __a, __b, __p)
1757 #define vcaddq_rot270_m_u8(__inactive, __a, __b, __p) __arm_vcaddq_rot270_m_u8(__inactive, __a, __b, __p)
1758 #define vcaddq_rot270_m_u32(__inactive, __a, __b, __p) __arm_vcaddq_rot270_m_u32(__inactive, __a, __b, __p)
1759 #define vcaddq_rot270_m_u16(__inactive, __a, __b, __p) __arm_vcaddq_rot270_m_u16(__inactive, __a, __b, __p)
1760 #define vcaddq_rot90_m_s8(__inactive, __a, __b, __p) __arm_vcaddq_rot90_m_s8(__inactive, __a, __b, __p)
1761 #define vcaddq_rot90_m_s32(__inactive, __a, __b, __p) __arm_vcaddq_rot90_m_s32(__inactive, __a, __b, __p)
1762 #define vcaddq_rot90_m_s16(__inactive, __a, __b, __p) __arm_vcaddq_rot90_m_s16(__inactive, __a, __b, __p)
1763 #define vcaddq_rot90_m_u8(__inactive, __a, __b, __p) __arm_vcaddq_rot90_m_u8(__inactive, __a, __b, __p)
/* Predicated ("_m") integer binary operations.  When the user has not
   requested __ARM_MVE_PRESERVE_USER_NAMESPACE (see top of file), each
   short intrinsic name simply forwards to its reserved-namespace
   __arm_-prefixed implementation with identical arguments.
   NOTE(review): per the Arm ACLE convention, __p is the lane predicate and
   __inactive supplies the result lanes where the predicate is false
   (merging predication); "_n" variants take a scalar __b — confirm against
   the MVE intrinsics specification.  */
#define vcaddq_rot90_m_u32(__inactive, __a, __b, __p) __arm_vcaddq_rot90_m_u32(__inactive, __a, __b, __p)
#define vcaddq_rot90_m_u16(__inactive, __a, __b, __p) __arm_vcaddq_rot90_m_u16(__inactive, __a, __b, __p)
#define veorq_m_s8(__inactive, __a, __b, __p) __arm_veorq_m_s8(__inactive, __a, __b, __p)
#define veorq_m_s32(__inactive, __a, __b, __p) __arm_veorq_m_s32(__inactive, __a, __b, __p)
#define veorq_m_s16(__inactive, __a, __b, __p) __arm_veorq_m_s16(__inactive, __a, __b, __p)
#define veorq_m_u8(__inactive, __a, __b, __p) __arm_veorq_m_u8(__inactive, __a, __b, __p)
#define veorq_m_u32(__inactive, __a, __b, __p) __arm_veorq_m_u32(__inactive, __a, __b, __p)
#define veorq_m_u16(__inactive, __a, __b, __p) __arm_veorq_m_u16(__inactive, __a, __b, __p)
#define vhaddq_m_n_s8(__inactive, __a, __b, __p) __arm_vhaddq_m_n_s8(__inactive, __a, __b, __p)
#define vhaddq_m_n_s32(__inactive, __a, __b, __p) __arm_vhaddq_m_n_s32(__inactive, __a, __b, __p)
#define vhaddq_m_n_s16(__inactive, __a, __b, __p) __arm_vhaddq_m_n_s16(__inactive, __a, __b, __p)
#define vhaddq_m_n_u8(__inactive, __a, __b, __p) __arm_vhaddq_m_n_u8(__inactive, __a, __b, __p)
#define vhaddq_m_n_u32(__inactive, __a, __b, __p) __arm_vhaddq_m_n_u32(__inactive, __a, __b, __p)
#define vhaddq_m_n_u16(__inactive, __a, __b, __p) __arm_vhaddq_m_n_u16(__inactive, __a, __b, __p)
#define vhaddq_m_s8(__inactive, __a, __b, __p) __arm_vhaddq_m_s8(__inactive, __a, __b, __p)
#define vhaddq_m_s32(__inactive, __a, __b, __p) __arm_vhaddq_m_s32(__inactive, __a, __b, __p)
#define vhaddq_m_s16(__inactive, __a, __b, __p) __arm_vhaddq_m_s16(__inactive, __a, __b, __p)
#define vhaddq_m_u8(__inactive, __a, __b, __p) __arm_vhaddq_m_u8(__inactive, __a, __b, __p)
#define vhaddq_m_u32(__inactive, __a, __b, __p) __arm_vhaddq_m_u32(__inactive, __a, __b, __p)
#define vhaddq_m_u16(__inactive, __a, __b, __p) __arm_vhaddq_m_u16(__inactive, __a, __b, __p)
#define vhcaddq_rot270_m_s8(__inactive, __a, __b, __p) __arm_vhcaddq_rot270_m_s8(__inactive, __a, __b, __p)
#define vhcaddq_rot270_m_s32(__inactive, __a, __b, __p) __arm_vhcaddq_rot270_m_s32(__inactive, __a, __b, __p)
#define vhcaddq_rot270_m_s16(__inactive, __a, __b, __p) __arm_vhcaddq_rot270_m_s16(__inactive, __a, __b, __p)
#define vhcaddq_rot90_m_s8(__inactive, __a, __b, __p) __arm_vhcaddq_rot90_m_s8(__inactive, __a, __b, __p)
#define vhcaddq_rot90_m_s32(__inactive, __a, __b, __p) __arm_vhcaddq_rot90_m_s32(__inactive, __a, __b, __p)
#define vhcaddq_rot90_m_s16(__inactive, __a, __b, __p) __arm_vhcaddq_rot90_m_s16(__inactive, __a, __b, __p)
#define vhsubq_m_n_s8(__inactive, __a, __b, __p) __arm_vhsubq_m_n_s8(__inactive, __a, __b, __p)
#define vhsubq_m_n_s32(__inactive, __a, __b, __p) __arm_vhsubq_m_n_s32(__inactive, __a, __b, __p)
#define vhsubq_m_n_s16(__inactive, __a, __b, __p) __arm_vhsubq_m_n_s16(__inactive, __a, __b, __p)
#define vhsubq_m_n_u8(__inactive, __a, __b, __p) __arm_vhsubq_m_n_u8(__inactive, __a, __b, __p)
#define vhsubq_m_n_u32(__inactive, __a, __b, __p) __arm_vhsubq_m_n_u32(__inactive, __a, __b, __p)
#define vhsubq_m_n_u16(__inactive, __a, __b, __p) __arm_vhsubq_m_n_u16(__inactive, __a, __b, __p)
#define vhsubq_m_s8(__inactive, __a, __b, __p) __arm_vhsubq_m_s8(__inactive, __a, __b, __p)
#define vhsubq_m_s32(__inactive, __a, __b, __p) __arm_vhsubq_m_s32(__inactive, __a, __b, __p)
#define vhsubq_m_s16(__inactive, __a, __b, __p) __arm_vhsubq_m_s16(__inactive, __a, __b, __p)
#define vhsubq_m_u8(__inactive, __a, __b, __p) __arm_vhsubq_m_u8(__inactive, __a, __b, __p)
#define vhsubq_m_u32(__inactive, __a, __b, __p) __arm_vhsubq_m_u32(__inactive, __a, __b, __p)
#define vhsubq_m_u16(__inactive, __a, __b, __p) __arm_vhsubq_m_u16(__inactive, __a, __b, __p)
#define vmaxq_m_s8(__inactive, __a, __b, __p) __arm_vmaxq_m_s8(__inactive, __a, __b, __p)
#define vmaxq_m_s32(__inactive, __a, __b, __p) __arm_vmaxq_m_s32(__inactive, __a, __b, __p)
#define vmaxq_m_s16(__inactive, __a, __b, __p) __arm_vmaxq_m_s16(__inactive, __a, __b, __p)
#define vmaxq_m_u8(__inactive, __a, __b, __p) __arm_vmaxq_m_u8(__inactive, __a, __b, __p)
#define vmaxq_m_u32(__inactive, __a, __b, __p) __arm_vmaxq_m_u32(__inactive, __a, __b, __p)
#define vmaxq_m_u16(__inactive, __a, __b, __p) __arm_vmaxq_m_u16(__inactive, __a, __b, __p)
#define vminq_m_s8(__inactive, __a, __b, __p) __arm_vminq_m_s8(__inactive, __a, __b, __p)
#define vminq_m_s32(__inactive, __a, __b, __p) __arm_vminq_m_s32(__inactive, __a, __b, __p)
#define vminq_m_s16(__inactive, __a, __b, __p) __arm_vminq_m_s16(__inactive, __a, __b, __p)
#define vminq_m_u8(__inactive, __a, __b, __p) __arm_vminq_m_u8(__inactive, __a, __b, __p)
#define vminq_m_u32(__inactive, __a, __b, __p) __arm_vminq_m_u32(__inactive, __a, __b, __p)
#define vminq_m_u16(__inactive, __a, __b, __p) __arm_vminq_m_u16(__inactive, __a, __b, __p)
/* Predicated ("_p") multiply-accumulate reductions and scalar MLA forms.
   These take an accumulator/addend __a plus operands __b, __c and the
   predicate __p, and forward unchanged to the __arm_-prefixed
   implementations.  All macro parameters stay in the implementation
   namespace (double underscore) so user #defines cannot collide.  */
#define vmladavaq_p_s8(__a, __b, __c, __p) __arm_vmladavaq_p_s8(__a, __b, __c, __p)
#define vmladavaq_p_s32(__a, __b, __c, __p) __arm_vmladavaq_p_s32(__a, __b, __c, __p)
#define vmladavaq_p_s16(__a, __b, __c, __p) __arm_vmladavaq_p_s16(__a, __b, __c, __p)
#define vmladavaq_p_u8(__a, __b, __c, __p) __arm_vmladavaq_p_u8(__a, __b, __c, __p)
#define vmladavaq_p_u32(__a, __b, __c, __p) __arm_vmladavaq_p_u32(__a, __b, __c, __p)
#define vmladavaq_p_u16(__a, __b, __c, __p) __arm_vmladavaq_p_u16(__a, __b, __c, __p)
#define vmladavaxq_p_s8(__a, __b, __c, __p) __arm_vmladavaxq_p_s8(__a, __b, __c, __p)
#define vmladavaxq_p_s32(__a, __b, __c, __p) __arm_vmladavaxq_p_s32(__a, __b, __c, __p)
#define vmladavaxq_p_s16(__a, __b, __c, __p) __arm_vmladavaxq_p_s16(__a, __b, __c, __p)
#define vmlaq_m_n_s8(__a, __b, __c, __p) __arm_vmlaq_m_n_s8(__a, __b, __c, __p)
#define vmlaq_m_n_s32(__a, __b, __c, __p) __arm_vmlaq_m_n_s32(__a, __b, __c, __p)
#define vmlaq_m_n_s16(__a, __b, __c, __p) __arm_vmlaq_m_n_s16(__a, __b, __c, __p)
#define vmlaq_m_n_u8(__a, __b, __c, __p) __arm_vmlaq_m_n_u8(__a, __b, __c, __p)
#define vmlaq_m_n_u32(__a, __b, __c, __p) __arm_vmlaq_m_n_u32(__a, __b, __c, __p)
#define vmlaq_m_n_u16(__a, __b, __c, __p) __arm_vmlaq_m_n_u16(__a, __b, __c, __p)
#define vmlasq_m_n_s8(__a, __b, __c, __p) __arm_vmlasq_m_n_s8(__a, __b, __c, __p)
#define vmlasq_m_n_s32(__a, __b, __c, __p) __arm_vmlasq_m_n_s32(__a, __b, __c, __p)
#define vmlasq_m_n_s16(__a, __b, __c, __p) __arm_vmlasq_m_n_s16(__a, __b, __c, __p)
#define vmlasq_m_n_u8(__a, __b, __c, __p) __arm_vmlasq_m_n_u8(__a, __b, __c, __p)
#define vmlasq_m_n_u32(__a, __b, __c, __p) __arm_vmlasq_m_n_u32(__a, __b, __c, __p)
#define vmlasq_m_n_u16(__a, __b, __c, __p) __arm_vmlasq_m_n_u16(__a, __b, __c, __p)
#define vmlsdavaq_p_s8(__a, __b, __c, __p) __arm_vmlsdavaq_p_s8(__a, __b, __c, __p)
#define vmlsdavaq_p_s32(__a, __b, __c, __p) __arm_vmlsdavaq_p_s32(__a, __b, __c, __p)
#define vmlsdavaq_p_s16(__a, __b, __c, __p) __arm_vmlsdavaq_p_s16(__a, __b, __c, __p)
#define vmlsdavaxq_p_s8(__a, __b, __c, __p) __arm_vmlsdavaxq_p_s8(__a, __b, __c, __p)
#define vmlsdavaxq_p_s32(__a, __b, __c, __p) __arm_vmlsdavaxq_p_s32(__a, __b, __c, __p)
#define vmlsdavaxq_p_s16(__a, __b, __c, __p) __arm_vmlsdavaxq_p_s16(__a, __b, __c, __p)
/* Predicated multiplies (high-half, widening "_int", by-scalar "_n") and
   bitwise OR family, all forwarding unchanged to the __arm_-prefixed
   implementations with (__inactive, __a, __b, __p) arguments.  */
#define vmulhq_m_s8(__inactive, __a, __b, __p) __arm_vmulhq_m_s8(__inactive, __a, __b, __p)
#define vmulhq_m_s32(__inactive, __a, __b, __p) __arm_vmulhq_m_s32(__inactive, __a, __b, __p)
#define vmulhq_m_s16(__inactive, __a, __b, __p) __arm_vmulhq_m_s16(__inactive, __a, __b, __p)
#define vmulhq_m_u8(__inactive, __a, __b, __p) __arm_vmulhq_m_u8(__inactive, __a, __b, __p)
#define vmulhq_m_u32(__inactive, __a, __b, __p) __arm_vmulhq_m_u32(__inactive, __a, __b, __p)
#define vmulhq_m_u16(__inactive, __a, __b, __p) __arm_vmulhq_m_u16(__inactive, __a, __b, __p)
#define vmullbq_int_m_s8(__inactive, __a, __b, __p) __arm_vmullbq_int_m_s8(__inactive, __a, __b, __p)
#define vmullbq_int_m_s32(__inactive, __a, __b, __p) __arm_vmullbq_int_m_s32(__inactive, __a, __b, __p)
#define vmullbq_int_m_s16(__inactive, __a, __b, __p) __arm_vmullbq_int_m_s16(__inactive, __a, __b, __p)
#define vmullbq_int_m_u8(__inactive, __a, __b, __p) __arm_vmullbq_int_m_u8(__inactive, __a, __b, __p)
#define vmullbq_int_m_u32(__inactive, __a, __b, __p) __arm_vmullbq_int_m_u32(__inactive, __a, __b, __p)
#define vmullbq_int_m_u16(__inactive, __a, __b, __p) __arm_vmullbq_int_m_u16(__inactive, __a, __b, __p)
#define vmulltq_int_m_s8(__inactive, __a, __b, __p) __arm_vmulltq_int_m_s8(__inactive, __a, __b, __p)
#define vmulltq_int_m_s32(__inactive, __a, __b, __p) __arm_vmulltq_int_m_s32(__inactive, __a, __b, __p)
#define vmulltq_int_m_s16(__inactive, __a, __b, __p) __arm_vmulltq_int_m_s16(__inactive, __a, __b, __p)
#define vmulltq_int_m_u8(__inactive, __a, __b, __p) __arm_vmulltq_int_m_u8(__inactive, __a, __b, __p)
#define vmulltq_int_m_u32(__inactive, __a, __b, __p) __arm_vmulltq_int_m_u32(__inactive, __a, __b, __p)
#define vmulltq_int_m_u16(__inactive, __a, __b, __p) __arm_vmulltq_int_m_u16(__inactive, __a, __b, __p)
#define vmulq_m_n_s8(__inactive, __a, __b, __p) __arm_vmulq_m_n_s8(__inactive, __a, __b, __p)
#define vmulq_m_n_s32(__inactive, __a, __b, __p) __arm_vmulq_m_n_s32(__inactive, __a, __b, __p)
#define vmulq_m_n_s16(__inactive, __a, __b, __p) __arm_vmulq_m_n_s16(__inactive, __a, __b, __p)
#define vmulq_m_n_u8(__inactive, __a, __b, __p) __arm_vmulq_m_n_u8(__inactive, __a, __b, __p)
#define vmulq_m_n_u32(__inactive, __a, __b, __p) __arm_vmulq_m_n_u32(__inactive, __a, __b, __p)
#define vmulq_m_n_u16(__inactive, __a, __b, __p) __arm_vmulq_m_n_u16(__inactive, __a, __b, __p)
#define vmulq_m_s8(__inactive, __a, __b, __p) __arm_vmulq_m_s8(__inactive, __a, __b, __p)
#define vmulq_m_s32(__inactive, __a, __b, __p) __arm_vmulq_m_s32(__inactive, __a, __b, __p)
#define vmulq_m_s16(__inactive, __a, __b, __p) __arm_vmulq_m_s16(__inactive, __a, __b, __p)
#define vmulq_m_u8(__inactive, __a, __b, __p) __arm_vmulq_m_u8(__inactive, __a, __b, __p)
#define vmulq_m_u32(__inactive, __a, __b, __p) __arm_vmulq_m_u32(__inactive, __a, __b, __p)
#define vmulq_m_u16(__inactive, __a, __b, __p) __arm_vmulq_m_u16(__inactive, __a, __b, __p)
#define vornq_m_s8(__inactive, __a, __b, __p) __arm_vornq_m_s8(__inactive, __a, __b, __p)
#define vornq_m_s32(__inactive, __a, __b, __p) __arm_vornq_m_s32(__inactive, __a, __b, __p)
#define vornq_m_s16(__inactive, __a, __b, __p) __arm_vornq_m_s16(__inactive, __a, __b, __p)
#define vornq_m_u8(__inactive, __a, __b, __p) __arm_vornq_m_u8(__inactive, __a, __b, __p)
#define vornq_m_u32(__inactive, __a, __b, __p) __arm_vornq_m_u32(__inactive, __a, __b, __p)
#define vornq_m_u16(__inactive, __a, __b, __p) __arm_vornq_m_u16(__inactive, __a, __b, __p)
#define vorrq_m_s8(__inactive, __a, __b, __p) __arm_vorrq_m_s8(__inactive, __a, __b, __p)
#define vorrq_m_s32(__inactive, __a, __b, __p) __arm_vorrq_m_s32(__inactive, __a, __b, __p)
#define vorrq_m_s16(__inactive, __a, __b, __p) __arm_vorrq_m_s16(__inactive, __a, __b, __p)
#define vorrq_m_u8(__inactive, __a, __b, __p) __arm_vorrq_m_u8(__inactive, __a, __b, __p)
#define vorrq_m_u32(__inactive, __a, __b, __p) __arm_vorrq_m_u32(__inactive, __a, __b, __p)
#define vorrq_m_u16(__inactive, __a, __b, __p) __arm_vorrq_m_u16(__inactive, __a, __b, __p)
/* Predicated saturating-family ("vq*") operations.  Most take
   (__inactive, __a, __b, __p); the scalar-accumulate forms
   (vqdmlashq/vqdmlahq/vqrdmlahq/vqrdmlashq) take (__a, __b, __c, __p).
   Every macro forwards unchanged to its __arm_-prefixed implementation.  */
#define vqaddq_m_n_s8(__inactive, __a, __b, __p) __arm_vqaddq_m_n_s8(__inactive, __a, __b, __p)
#define vqaddq_m_n_s32(__inactive, __a, __b, __p) __arm_vqaddq_m_n_s32(__inactive, __a, __b, __p)
#define vqaddq_m_n_s16(__inactive, __a, __b, __p) __arm_vqaddq_m_n_s16(__inactive, __a, __b, __p)
#define vqaddq_m_n_u8(__inactive, __a, __b, __p) __arm_vqaddq_m_n_u8(__inactive, __a, __b, __p)
#define vqaddq_m_n_u32(__inactive, __a, __b, __p) __arm_vqaddq_m_n_u32(__inactive, __a, __b, __p)
#define vqaddq_m_n_u16(__inactive, __a, __b, __p) __arm_vqaddq_m_n_u16(__inactive, __a, __b, __p)
#define vqaddq_m_s8(__inactive, __a, __b, __p) __arm_vqaddq_m_s8(__inactive, __a, __b, __p)
#define vqaddq_m_s32(__inactive, __a, __b, __p) __arm_vqaddq_m_s32(__inactive, __a, __b, __p)
#define vqaddq_m_s16(__inactive, __a, __b, __p) __arm_vqaddq_m_s16(__inactive, __a, __b, __p)
#define vqaddq_m_u8(__inactive, __a, __b, __p) __arm_vqaddq_m_u8(__inactive, __a, __b, __p)
#define vqaddq_m_u32(__inactive, __a, __b, __p) __arm_vqaddq_m_u32(__inactive, __a, __b, __p)
#define vqaddq_m_u16(__inactive, __a, __b, __p) __arm_vqaddq_m_u16(__inactive, __a, __b, __p)
#define vqdmladhq_m_s8(__inactive, __a, __b, __p) __arm_vqdmladhq_m_s8(__inactive, __a, __b, __p)
#define vqdmladhq_m_s32(__inactive, __a, __b, __p) __arm_vqdmladhq_m_s32(__inactive, __a, __b, __p)
#define vqdmladhq_m_s16(__inactive, __a, __b, __p) __arm_vqdmladhq_m_s16(__inactive, __a, __b, __p)
#define vqdmladhxq_m_s8(__inactive, __a, __b, __p) __arm_vqdmladhxq_m_s8(__inactive, __a, __b, __p)
#define vqdmladhxq_m_s32(__inactive, __a, __b, __p) __arm_vqdmladhxq_m_s32(__inactive, __a, __b, __p)
#define vqdmladhxq_m_s16(__inactive, __a, __b, __p) __arm_vqdmladhxq_m_s16(__inactive, __a, __b, __p)
#define vqdmlashq_m_n_s8(__a, __b, __c, __p) __arm_vqdmlashq_m_n_s8(__a, __b, __c, __p)
#define vqdmlashq_m_n_s32(__a, __b, __c, __p) __arm_vqdmlashq_m_n_s32(__a, __b, __c, __p)
#define vqdmlashq_m_n_s16(__a, __b, __c, __p) __arm_vqdmlashq_m_n_s16(__a, __b, __c, __p)
#define vqdmlahq_m_n_s8(__a, __b, __c, __p) __arm_vqdmlahq_m_n_s8(__a, __b, __c, __p)
#define vqdmlahq_m_n_s32(__a, __b, __c, __p) __arm_vqdmlahq_m_n_s32(__a, __b, __c, __p)
#define vqdmlahq_m_n_s16(__a, __b, __c, __p) __arm_vqdmlahq_m_n_s16(__a, __b, __c, __p)
#define vqdmlsdhq_m_s8(__inactive, __a, __b, __p) __arm_vqdmlsdhq_m_s8(__inactive, __a, __b, __p)
#define vqdmlsdhq_m_s32(__inactive, __a, __b, __p) __arm_vqdmlsdhq_m_s32(__inactive, __a, __b, __p)
#define vqdmlsdhq_m_s16(__inactive, __a, __b, __p) __arm_vqdmlsdhq_m_s16(__inactive, __a, __b, __p)
#define vqdmlsdhxq_m_s8(__inactive, __a, __b, __p) __arm_vqdmlsdhxq_m_s8(__inactive, __a, __b, __p)
#define vqdmlsdhxq_m_s32(__inactive, __a, __b, __p) __arm_vqdmlsdhxq_m_s32(__inactive, __a, __b, __p)
#define vqdmlsdhxq_m_s16(__inactive, __a, __b, __p) __arm_vqdmlsdhxq_m_s16(__inactive, __a, __b, __p)
#define vqdmulhq_m_n_s8(__inactive, __a, __b, __p) __arm_vqdmulhq_m_n_s8(__inactive, __a, __b, __p)
#define vqdmulhq_m_n_s32(__inactive, __a, __b, __p) __arm_vqdmulhq_m_n_s32(__inactive, __a, __b, __p)
#define vqdmulhq_m_n_s16(__inactive, __a, __b, __p) __arm_vqdmulhq_m_n_s16(__inactive, __a, __b, __p)
#define vqdmulhq_m_s8(__inactive, __a, __b, __p) __arm_vqdmulhq_m_s8(__inactive, __a, __b, __p)
#define vqdmulhq_m_s32(__inactive, __a, __b, __p) __arm_vqdmulhq_m_s32(__inactive, __a, __b, __p)
#define vqdmulhq_m_s16(__inactive, __a, __b, __p) __arm_vqdmulhq_m_s16(__inactive, __a, __b, __p)
#define vqrdmladhq_m_s8(__inactive, __a, __b, __p) __arm_vqrdmladhq_m_s8(__inactive, __a, __b, __p)
#define vqrdmladhq_m_s32(__inactive, __a, __b, __p) __arm_vqrdmladhq_m_s32(__inactive, __a, __b, __p)
#define vqrdmladhq_m_s16(__inactive, __a, __b, __p) __arm_vqrdmladhq_m_s16(__inactive, __a, __b, __p)
#define vqrdmladhxq_m_s8(__inactive, __a, __b, __p) __arm_vqrdmladhxq_m_s8(__inactive, __a, __b, __p)
#define vqrdmladhxq_m_s32(__inactive, __a, __b, __p) __arm_vqrdmladhxq_m_s32(__inactive, __a, __b, __p)
#define vqrdmladhxq_m_s16(__inactive, __a, __b, __p) __arm_vqrdmladhxq_m_s16(__inactive, __a, __b, __p)
#define vqrdmlahq_m_n_s8(__a, __b, __c, __p) __arm_vqrdmlahq_m_n_s8(__a, __b, __c, __p)
#define vqrdmlahq_m_n_s32(__a, __b, __c, __p) __arm_vqrdmlahq_m_n_s32(__a, __b, __c, __p)
#define vqrdmlahq_m_n_s16(__a, __b, __c, __p) __arm_vqrdmlahq_m_n_s16(__a, __b, __c, __p)
#define vqrdmlashq_m_n_s8(__a, __b, __c, __p) __arm_vqrdmlashq_m_n_s8(__a, __b, __c, __p)
#define vqrdmlashq_m_n_s32(__a, __b, __c, __p) __arm_vqrdmlashq_m_n_s32(__a, __b, __c, __p)
#define vqrdmlashq_m_n_s16(__a, __b, __c, __p) __arm_vqrdmlashq_m_n_s16(__a, __b, __c, __p)
#define vqrdmlsdhq_m_s8(__inactive, __a, __b, __p) __arm_vqrdmlsdhq_m_s8(__inactive, __a, __b, __p)
#define vqrdmlsdhq_m_s32(__inactive, __a, __b, __p) __arm_vqrdmlsdhq_m_s32(__inactive, __a, __b, __p)
#define vqrdmlsdhq_m_s16(__inactive, __a, __b, __p) __arm_vqrdmlsdhq_m_s16(__inactive, __a, __b, __p)
#define vqrdmlsdhxq_m_s8(__inactive, __a, __b, __p) __arm_vqrdmlsdhxq_m_s8(__inactive, __a, __b, __p)
#define vqrdmlsdhxq_m_s32(__inactive, __a, __b, __p) __arm_vqrdmlsdhxq_m_s32(__inactive, __a, __b, __p)
#define vqrdmlsdhxq_m_s16(__inactive, __a, __b, __p) __arm_vqrdmlsdhxq_m_s16(__inactive, __a, __b, __p)
#define vqrdmulhq_m_n_s8(__inactive, __a, __b, __p) __arm_vqrdmulhq_m_n_s8(__inactive, __a, __b, __p)
#define vqrdmulhq_m_n_s32(__inactive, __a, __b, __p) __arm_vqrdmulhq_m_n_s32(__inactive, __a, __b, __p)
#define vqrdmulhq_m_n_s16(__inactive, __a, __b, __p) __arm_vqrdmulhq_m_n_s16(__inactive, __a, __b, __p)
#define vqrdmulhq_m_s8(__inactive, __a, __b, __p) __arm_vqrdmulhq_m_s8(__inactive, __a, __b, __p)
#define vqrdmulhq_m_s32(__inactive, __a, __b, __p) __arm_vqrdmulhq_m_s32(__inactive, __a, __b, __p)
#define vqrdmulhq_m_s16(__inactive, __a, __b, __p) __arm_vqrdmulhq_m_s16(__inactive, __a, __b, __p)
#define vqrshlq_m_s8(__inactive, __a, __b, __p) __arm_vqrshlq_m_s8(__inactive, __a, __b, __p)
#define vqrshlq_m_s32(__inactive, __a, __b, __p) __arm_vqrshlq_m_s32(__inactive, __a, __b, __p)
#define vqrshlq_m_s16(__inactive, __a, __b, __p) __arm_vqrshlq_m_s16(__inactive, __a, __b, __p)
#define vqrshlq_m_u8(__inactive, __a, __b, __p) __arm_vqrshlq_m_u8(__inactive, __a, __b, __p)
#define vqrshlq_m_u32(__inactive, __a, __b, __p) __arm_vqrshlq_m_u32(__inactive, __a, __b, __p)
#define vqrshlq_m_u16(__inactive, __a, __b, __p) __arm_vqrshlq_m_u16(__inactive, __a, __b, __p)
/* Predicated shifts, saturating subtract, rounding ops, and
   shift-left-insert.  Immediate-shift variants name their third argument
   __imm rather than __b; all forward unchanged to the __arm_-prefixed
   implementations.  NOTE(review): __imm is presumably a compile-time
   shift amount checked by the builtin — see the MVE intrinsics spec.  */
#define vqshlq_m_n_s8(__inactive, __a, __imm, __p) __arm_vqshlq_m_n_s8(__inactive, __a, __imm, __p)
#define vqshlq_m_n_s32(__inactive, __a, __imm, __p) __arm_vqshlq_m_n_s32(__inactive, __a, __imm, __p)
#define vqshlq_m_n_s16(__inactive, __a, __imm, __p) __arm_vqshlq_m_n_s16(__inactive, __a, __imm, __p)
#define vqshlq_m_n_u8(__inactive, __a, __imm, __p) __arm_vqshlq_m_n_u8(__inactive, __a, __imm, __p)
#define vqshlq_m_n_u32(__inactive, __a, __imm, __p) __arm_vqshlq_m_n_u32(__inactive, __a, __imm, __p)
#define vqshlq_m_n_u16(__inactive, __a, __imm, __p) __arm_vqshlq_m_n_u16(__inactive, __a, __imm, __p)
#define vqshlq_m_s8(__inactive, __a, __b, __p) __arm_vqshlq_m_s8(__inactive, __a, __b, __p)
#define vqshlq_m_s32(__inactive, __a, __b, __p) __arm_vqshlq_m_s32(__inactive, __a, __b, __p)
#define vqshlq_m_s16(__inactive, __a, __b, __p) __arm_vqshlq_m_s16(__inactive, __a, __b, __p)
#define vqshlq_m_u8(__inactive, __a, __b, __p) __arm_vqshlq_m_u8(__inactive, __a, __b, __p)
#define vqshlq_m_u32(__inactive, __a, __b, __p) __arm_vqshlq_m_u32(__inactive, __a, __b, __p)
#define vqshlq_m_u16(__inactive, __a, __b, __p) __arm_vqshlq_m_u16(__inactive, __a, __b, __p)
#define vqsubq_m_n_s8(__inactive, __a, __b, __p) __arm_vqsubq_m_n_s8(__inactive, __a, __b, __p)
#define vqsubq_m_n_s32(__inactive, __a, __b, __p) __arm_vqsubq_m_n_s32(__inactive, __a, __b, __p)
#define vqsubq_m_n_s16(__inactive, __a, __b, __p) __arm_vqsubq_m_n_s16(__inactive, __a, __b, __p)
#define vqsubq_m_n_u8(__inactive, __a, __b, __p) __arm_vqsubq_m_n_u8(__inactive, __a, __b, __p)
#define vqsubq_m_n_u32(__inactive, __a, __b, __p) __arm_vqsubq_m_n_u32(__inactive, __a, __b, __p)
#define vqsubq_m_n_u16(__inactive, __a, __b, __p) __arm_vqsubq_m_n_u16(__inactive, __a, __b, __p)
#define vqsubq_m_s8(__inactive, __a, __b, __p) __arm_vqsubq_m_s8(__inactive, __a, __b, __p)
#define vqsubq_m_s32(__inactive, __a, __b, __p) __arm_vqsubq_m_s32(__inactive, __a, __b, __p)
#define vqsubq_m_s16(__inactive, __a, __b, __p) __arm_vqsubq_m_s16(__inactive, __a, __b, __p)
#define vqsubq_m_u8(__inactive, __a, __b, __p) __arm_vqsubq_m_u8(__inactive, __a, __b, __p)
#define vqsubq_m_u32(__inactive, __a, __b, __p) __arm_vqsubq_m_u32(__inactive, __a, __b, __p)
#define vqsubq_m_u16(__inactive, __a, __b, __p) __arm_vqsubq_m_u16(__inactive, __a, __b, __p)
#define vrhaddq_m_s8(__inactive, __a, __b, __p) __arm_vrhaddq_m_s8(__inactive, __a, __b, __p)
#define vrhaddq_m_s32(__inactive, __a, __b, __p) __arm_vrhaddq_m_s32(__inactive, __a, __b, __p)
#define vrhaddq_m_s16(__inactive, __a, __b, __p) __arm_vrhaddq_m_s16(__inactive, __a, __b, __p)
#define vrhaddq_m_u8(__inactive, __a, __b, __p) __arm_vrhaddq_m_u8(__inactive, __a, __b, __p)
#define vrhaddq_m_u32(__inactive, __a, __b, __p) __arm_vrhaddq_m_u32(__inactive, __a, __b, __p)
#define vrhaddq_m_u16(__inactive, __a, __b, __p) __arm_vrhaddq_m_u16(__inactive, __a, __b, __p)
#define vrmulhq_m_s8(__inactive, __a, __b, __p) __arm_vrmulhq_m_s8(__inactive, __a, __b, __p)
#define vrmulhq_m_s32(__inactive, __a, __b, __p) __arm_vrmulhq_m_s32(__inactive, __a, __b, __p)
#define vrmulhq_m_s16(__inactive, __a, __b, __p) __arm_vrmulhq_m_s16(__inactive, __a, __b, __p)
#define vrmulhq_m_u8(__inactive, __a, __b, __p) __arm_vrmulhq_m_u8(__inactive, __a, __b, __p)
#define vrmulhq_m_u32(__inactive, __a, __b, __p) __arm_vrmulhq_m_u32(__inactive, __a, __b, __p)
#define vrmulhq_m_u16(__inactive, __a, __b, __p) __arm_vrmulhq_m_u16(__inactive, __a, __b, __p)
#define vrshlq_m_s8(__inactive, __a, __b, __p) __arm_vrshlq_m_s8(__inactive, __a, __b, __p)
#define vrshlq_m_s32(__inactive, __a, __b, __p) __arm_vrshlq_m_s32(__inactive, __a, __b, __p)
#define vrshlq_m_s16(__inactive, __a, __b, __p) __arm_vrshlq_m_s16(__inactive, __a, __b, __p)
#define vrshlq_m_u8(__inactive, __a, __b, __p) __arm_vrshlq_m_u8(__inactive, __a, __b, __p)
#define vrshlq_m_u32(__inactive, __a, __b, __p) __arm_vrshlq_m_u32(__inactive, __a, __b, __p)
#define vrshlq_m_u16(__inactive, __a, __b, __p) __arm_vrshlq_m_u16(__inactive, __a, __b, __p)
#define vrshrq_m_n_s8(__inactive, __a, __imm, __p) __arm_vrshrq_m_n_s8(__inactive, __a, __imm, __p)
#define vrshrq_m_n_s32(__inactive, __a, __imm, __p) __arm_vrshrq_m_n_s32(__inactive, __a, __imm, __p)
#define vrshrq_m_n_s16(__inactive, __a, __imm, __p) __arm_vrshrq_m_n_s16(__inactive, __a, __imm, __p)
#define vrshrq_m_n_u8(__inactive, __a, __imm, __p) __arm_vrshrq_m_n_u8(__inactive, __a, __imm, __p)
#define vrshrq_m_n_u32(__inactive, __a, __imm, __p) __arm_vrshrq_m_n_u32(__inactive, __a, __imm, __p)
#define vrshrq_m_n_u16(__inactive, __a, __imm, __p) __arm_vrshrq_m_n_u16(__inactive, __a, __imm, __p)
#define vshlq_m_n_s8(__inactive, __a, __imm, __p) __arm_vshlq_m_n_s8(__inactive, __a, __imm, __p)
#define vshlq_m_n_s32(__inactive, __a, __imm, __p) __arm_vshlq_m_n_s32(__inactive, __a, __imm, __p)
#define vshlq_m_n_s16(__inactive, __a, __imm, __p) __arm_vshlq_m_n_s16(__inactive, __a, __imm, __p)
#define vshlq_m_n_u8(__inactive, __a, __imm, __p) __arm_vshlq_m_n_u8(__inactive, __a, __imm, __p)
#define vshlq_m_n_u32(__inactive, __a, __imm, __p) __arm_vshlq_m_n_u32(__inactive, __a, __imm, __p)
#define vshlq_m_n_u16(__inactive, __a, __imm, __p) __arm_vshlq_m_n_u16(__inactive, __a, __imm, __p)
#define vshrq_m_n_s8(__inactive, __a, __imm, __p) __arm_vshrq_m_n_s8(__inactive, __a, __imm, __p)
#define vshrq_m_n_s32(__inactive, __a, __imm, __p) __arm_vshrq_m_n_s32(__inactive, __a, __imm, __p)
#define vshrq_m_n_s16(__inactive, __a, __imm, __p) __arm_vshrq_m_n_s16(__inactive, __a, __imm, __p)
#define vshrq_m_n_u8(__inactive, __a, __imm, __p) __arm_vshrq_m_n_u8(__inactive, __a, __imm, __p)
#define vshrq_m_n_u32(__inactive, __a, __imm, __p) __arm_vshrq_m_n_u32(__inactive, __a, __imm, __p)
#define vshrq_m_n_u16(__inactive, __a, __imm, __p) __arm_vshrq_m_n_u16(__inactive, __a, __imm, __p)
#define vsliq_m_n_s8(__a, __b, __imm, __p) __arm_vsliq_m_n_s8(__a, __b, __imm, __p)
#define vsliq_m_n_s32(__a, __b, __imm, __p) __arm_vsliq_m_n_s32(__a, __b, __imm, __p)
#define vsliq_m_n_s16(__a, __b, __imm, __p) __arm_vsliq_m_n_s16(__a, __b, __imm, __p)
#define vsliq_m_n_u8(__a, __b, __imm, __p) __arm_vsliq_m_n_u8(__a, __b, __imm, __p)
#define vsliq_m_n_u32(__a, __b, __imm, __p) __arm_vsliq_m_n_u32(__a, __b, __imm, __p)
#define vsliq_m_n_u16(__a, __b, __imm, __p) __arm_vsliq_m_n_u16(__a, __b, __imm, __p)
/* Predicated subtract-by-scalar, long ("_l") multiply-accumulate
   reductions, polynomial multiplies (p8/p16 element types), and
   saturating-doubling long multiplies.  Pure name forwarding to the
   __arm_-prefixed implementations, as above.  */
#define vsubq_m_n_s8(__inactive, __a, __b, __p) __arm_vsubq_m_n_s8(__inactive, __a, __b, __p)
#define vsubq_m_n_s32(__inactive, __a, __b, __p) __arm_vsubq_m_n_s32(__inactive, __a, __b, __p)
#define vsubq_m_n_s16(__inactive, __a, __b, __p) __arm_vsubq_m_n_s16(__inactive, __a, __b, __p)
#define vsubq_m_n_u8(__inactive, __a, __b, __p) __arm_vsubq_m_n_u8(__inactive, __a, __b, __p)
#define vsubq_m_n_u32(__inactive, __a, __b, __p) __arm_vsubq_m_n_u32(__inactive, __a, __b, __p)
#define vsubq_m_n_u16(__inactive, __a, __b, __p) __arm_vsubq_m_n_u16(__inactive, __a, __b, __p)
#define vmlaldavaq_p_s32(__a, __b, __c, __p) __arm_vmlaldavaq_p_s32(__a, __b, __c, __p)
#define vmlaldavaq_p_s16(__a, __b, __c, __p) __arm_vmlaldavaq_p_s16(__a, __b, __c, __p)
#define vmlaldavaq_p_u32(__a, __b, __c, __p) __arm_vmlaldavaq_p_u32(__a, __b, __c, __p)
#define vmlaldavaq_p_u16(__a, __b, __c, __p) __arm_vmlaldavaq_p_u16(__a, __b, __c, __p)
#define vmlaldavaxq_p_s32(__a, __b, __c, __p) __arm_vmlaldavaxq_p_s32(__a, __b, __c, __p)
#define vmlaldavaxq_p_s16(__a, __b, __c, __p) __arm_vmlaldavaxq_p_s16(__a, __b, __c, __p)
#define vmlsldavaq_p_s32(__a, __b, __c, __p) __arm_vmlsldavaq_p_s32(__a, __b, __c, __p)
#define vmlsldavaq_p_s16(__a, __b, __c, __p) __arm_vmlsldavaq_p_s16(__a, __b, __c, __p)
#define vmlsldavaxq_p_s32(__a, __b, __c, __p) __arm_vmlsldavaxq_p_s32(__a, __b, __c, __p)
#define vmlsldavaxq_p_s16(__a, __b, __c, __p) __arm_vmlsldavaxq_p_s16(__a, __b, __c, __p)
#define vmullbq_poly_m_p8(__inactive, __a, __b, __p) __arm_vmullbq_poly_m_p8(__inactive, __a, __b, __p)
#define vmullbq_poly_m_p16(__inactive, __a, __b, __p) __arm_vmullbq_poly_m_p16(__inactive, __a, __b, __p)
#define vmulltq_poly_m_p8(__inactive, __a, __b, __p) __arm_vmulltq_poly_m_p8(__inactive, __a, __b, __p)
#define vmulltq_poly_m_p16(__inactive, __a, __b, __p) __arm_vmulltq_poly_m_p16(__inactive, __a, __b, __p)
#define vqdmullbq_m_n_s32(__inactive, __a, __b, __p) __arm_vqdmullbq_m_n_s32(__inactive, __a, __b, __p)
#define vqdmullbq_m_n_s16(__inactive, __a, __b, __p) __arm_vqdmullbq_m_n_s16(__inactive, __a, __b, __p)
#define vqdmullbq_m_s32(__inactive, __a, __b, __p) __arm_vqdmullbq_m_s32(__inactive, __a, __b, __p)
#define vqdmullbq_m_s16(__inactive, __a, __b, __p) __arm_vqdmullbq_m_s16(__inactive, __a, __b, __p)
#define vqdmulltq_m_n_s32(__inactive, __a, __b, __p) __arm_vqdmulltq_m_n_s32(__inactive, __a, __b, __p)
#define vqdmulltq_m_n_s16(__inactive, __a, __b, __p) __arm_vqdmulltq_m_n_s16(__inactive, __a, __b, __p)
#define vqdmulltq_m_s32(__inactive, __a, __b, __p) __arm_vqdmulltq_m_s32(__inactive, __a, __b, __p)
#define vqdmulltq_m_s16(__inactive, __a, __b, __p) __arm_vqdmulltq_m_s16(__inactive, __a, __b, __p)
/* Predicated narrowing shifts (bottom "_b"/top "_t" halves, saturating
   and rounding variants, signed-to-unsigned "un" forms exist only for
   s32/s16 sources) and the high-half 32-bit long reductions (vrml*davh*).
   All forward unchanged to the __arm_-prefixed implementations.  */
#define vqrshrnbq_m_n_s32(__a, __b, __imm, __p) __arm_vqrshrnbq_m_n_s32(__a, __b, __imm, __p)
#define vqrshrnbq_m_n_s16(__a, __b, __imm, __p) __arm_vqrshrnbq_m_n_s16(__a, __b, __imm, __p)
#define vqrshrnbq_m_n_u32(__a, __b, __imm, __p) __arm_vqrshrnbq_m_n_u32(__a, __b, __imm, __p)
#define vqrshrnbq_m_n_u16(__a, __b, __imm, __p) __arm_vqrshrnbq_m_n_u16(__a, __b, __imm, __p)
#define vqrshrntq_m_n_s32(__a, __b, __imm, __p) __arm_vqrshrntq_m_n_s32(__a, __b, __imm, __p)
#define vqrshrntq_m_n_s16(__a, __b, __imm, __p) __arm_vqrshrntq_m_n_s16(__a, __b, __imm, __p)
#define vqrshrntq_m_n_u32(__a, __b, __imm, __p) __arm_vqrshrntq_m_n_u32(__a, __b, __imm, __p)
#define vqrshrntq_m_n_u16(__a, __b, __imm, __p) __arm_vqrshrntq_m_n_u16(__a, __b, __imm, __p)
#define vqrshrunbq_m_n_s32(__a, __b, __imm, __p) __arm_vqrshrunbq_m_n_s32(__a, __b, __imm, __p)
#define vqrshrunbq_m_n_s16(__a, __b, __imm, __p) __arm_vqrshrunbq_m_n_s16(__a, __b, __imm, __p)
#define vqrshruntq_m_n_s32(__a, __b, __imm, __p) __arm_vqrshruntq_m_n_s32(__a, __b, __imm, __p)
#define vqrshruntq_m_n_s16(__a, __b, __imm, __p) __arm_vqrshruntq_m_n_s16(__a, __b, __imm, __p)
#define vqshrnbq_m_n_s32(__a, __b, __imm, __p) __arm_vqshrnbq_m_n_s32(__a, __b, __imm, __p)
#define vqshrnbq_m_n_s16(__a, __b, __imm, __p) __arm_vqshrnbq_m_n_s16(__a, __b, __imm, __p)
#define vqshrnbq_m_n_u32(__a, __b, __imm, __p) __arm_vqshrnbq_m_n_u32(__a, __b, __imm, __p)
#define vqshrnbq_m_n_u16(__a, __b, __imm, __p) __arm_vqshrnbq_m_n_u16(__a, __b, __imm, __p)
#define vqshrntq_m_n_s32(__a, __b, __imm, __p) __arm_vqshrntq_m_n_s32(__a, __b, __imm, __p)
#define vqshrntq_m_n_s16(__a, __b, __imm, __p) __arm_vqshrntq_m_n_s16(__a, __b, __imm, __p)
#define vqshrntq_m_n_u32(__a, __b, __imm, __p) __arm_vqshrntq_m_n_u32(__a, __b, __imm, __p)
#define vqshrntq_m_n_u16(__a, __b, __imm, __p) __arm_vqshrntq_m_n_u16(__a, __b, __imm, __p)
#define vqshrunbq_m_n_s32(__a, __b, __imm, __p) __arm_vqshrunbq_m_n_s32(__a, __b, __imm, __p)
#define vqshrunbq_m_n_s16(__a, __b, __imm, __p) __arm_vqshrunbq_m_n_s16(__a, __b, __imm, __p)
#define vqshruntq_m_n_s32(__a, __b, __imm, __p) __arm_vqshruntq_m_n_s32(__a, __b, __imm, __p)
#define vqshruntq_m_n_s16(__a, __b, __imm, __p) __arm_vqshruntq_m_n_s16(__a, __b, __imm, __p)
#define vrmlaldavhaq_p_s32(__a, __b, __c, __p) __arm_vrmlaldavhaq_p_s32(__a, __b, __c, __p)
#define vrmlaldavhaq_p_u32(__a, __b, __c, __p) __arm_vrmlaldavhaq_p_u32(__a, __b, __c, __p)
#define vrmlaldavhaxq_p_s32(__a, __b, __c, __p) __arm_vrmlaldavhaxq_p_s32(__a, __b, __c, __p)
#define vrmlsldavhaq_p_s32(__a, __b, __c, __p) __arm_vrmlsldavhaq_p_s32(__a, __b, __c, __p)
#define vrmlsldavhaxq_p_s32(__a, __b, __c, __p) __arm_vrmlsldavhaxq_p_s32(__a, __b, __c, __p)
#define vrshrnbq_m_n_s32(__a, __b, __imm, __p) __arm_vrshrnbq_m_n_s32(__a, __b, __imm, __p)
#define vrshrnbq_m_n_s16(__a, __b, __imm, __p) __arm_vrshrnbq_m_n_s16(__a, __b, __imm, __p)
#define vrshrnbq_m_n_u32(__a, __b, __imm, __p) __arm_vrshrnbq_m_n_u32(__a, __b, __imm, __p)
#define vrshrnbq_m_n_u16(__a, __b, __imm, __p) __arm_vrshrnbq_m_n_u16(__a, __b, __imm, __p)
#define vrshrntq_m_n_s32(__a, __b, __imm, __p) __arm_vrshrntq_m_n_s32(__a, __b, __imm, __p)
#define vrshrntq_m_n_s16(__a, __b, __imm, __p) __arm_vrshrntq_m_n_s16(__a, __b, __imm, __p)
#define vrshrntq_m_n_u32(__a, __b, __imm, __p) __arm_vrshrntq_m_n_u32(__a, __b, __imm, __p)
#define vrshrntq_m_n_u16(__a, __b, __imm, __p) __arm_vrshrntq_m_n_u16(__a, __b, __imm, __p)
/* Predicated widening shift-left-long (bottom/top halves, 8- and 16-bit
   sources only) and plain narrowing shift-right (32/16-bit sources).
   Each macro forwards unchanged to its __arm_-prefixed implementation.  */
#define vshllbq_m_n_s8(__inactive, __a, __imm, __p) __arm_vshllbq_m_n_s8(__inactive, __a, __imm, __p)
#define vshllbq_m_n_s16(__inactive, __a, __imm, __p) __arm_vshllbq_m_n_s16(__inactive, __a, __imm, __p)
#define vshllbq_m_n_u8(__inactive, __a, __imm, __p) __arm_vshllbq_m_n_u8(__inactive, __a, __imm, __p)
#define vshllbq_m_n_u16(__inactive, __a, __imm, __p) __arm_vshllbq_m_n_u16(__inactive, __a, __imm, __p)
#define vshlltq_m_n_s8(__inactive, __a, __imm, __p) __arm_vshlltq_m_n_s8(__inactive, __a, __imm, __p)
#define vshlltq_m_n_s16(__inactive, __a, __imm, __p) __arm_vshlltq_m_n_s16(__inactive, __a, __imm, __p)
#define vshlltq_m_n_u8(__inactive, __a, __imm, __p) __arm_vshlltq_m_n_u8(__inactive, __a, __imm, __p)
#define vshlltq_m_n_u16(__inactive, __a, __imm, __p) __arm_vshlltq_m_n_u16(__inactive, __a, __imm, __p)
#define vshrnbq_m_n_s32(__a, __b, __imm, __p) __arm_vshrnbq_m_n_s32(__a, __b, __imm, __p)
#define vshrnbq_m_n_s16(__a, __b, __imm, __p) __arm_vshrnbq_m_n_s16(__a, __b, __imm, __p)
#define vshrnbq_m_n_u32(__a, __b, __imm, __p) __arm_vshrnbq_m_n_u32(__a, __b, __imm, __p)
#define vshrnbq_m_n_u16(__a, __b, __imm, __p) __arm_vshrnbq_m_n_u16(__a, __b, __imm, __p)
#define vshrntq_m_n_s32(__a, __b, __imm, __p) __arm_vshrntq_m_n_s32(__a, __b, __imm, __p)
#define vshrntq_m_n_s16(__a, __b, __imm, __p) __arm_vshrntq_m_n_s16(__a, __b, __imm, __p)
#define vshrntq_m_n_u32(__a, __b, __imm, __p) __arm_vshrntq_m_n_u32(__a, __b, __imm, __p)
#define vshrntq_m_n_u16(__a, __b, __imm, __p) __arm_vshrntq_m_n_u16(__a, __b, __imm, __p)
/* Predicated (_m) f16/f32 floating-point intrinsics.  Most _m forms carry
   an extra __inactive vector plus the predicate __p; the accumulating
   vcmlaq/vfmaq/vfmasq/vfmsq families instead take three data operands
   (__a, __b, __c) and __p.  _n forms take a scalar final data operand.
   All macros merely forward to the typed __arm_* implementation.  */
#define vabdq_m_f32(__inactive, __a, __b, __p) __arm_vabdq_m_f32(__inactive, __a, __b, __p)
#define vabdq_m_f16(__inactive, __a, __b, __p) __arm_vabdq_m_f16(__inactive, __a, __b, __p)
#define vaddq_m_f32(__inactive, __a, __b, __p) __arm_vaddq_m_f32(__inactive, __a, __b, __p)
#define vaddq_m_f16(__inactive, __a, __b, __p) __arm_vaddq_m_f16(__inactive, __a, __b, __p)
#define vaddq_m_n_f32(__inactive, __a, __b, __p) __arm_vaddq_m_n_f32(__inactive, __a, __b, __p)
#define vaddq_m_n_f16(__inactive, __a, __b, __p) __arm_vaddq_m_n_f16(__inactive, __a, __b, __p)
#define vandq_m_f32(__inactive, __a, __b, __p) __arm_vandq_m_f32(__inactive, __a, __b, __p)
#define vandq_m_f16(__inactive, __a, __b, __p) __arm_vandq_m_f16(__inactive, __a, __b, __p)
#define vbicq_m_f32(__inactive, __a, __b, __p) __arm_vbicq_m_f32(__inactive, __a, __b, __p)
#define vbicq_m_f16(__inactive, __a, __b, __p) __arm_vbicq_m_f16(__inactive, __a, __b, __p)
#define vbrsrq_m_n_f32(__inactive, __a, __b, __p) __arm_vbrsrq_m_n_f32(__inactive, __a, __b, __p)
#define vbrsrq_m_n_f16(__inactive, __a, __b, __p) __arm_vbrsrq_m_n_f16(__inactive, __a, __b, __p)
/* Complex add/multiply-accumulate variants with a rotation encoded in the
   name (rot90/rot180/rot270).  */
#define vcaddq_rot270_m_f32(__inactive, __a, __b, __p) __arm_vcaddq_rot270_m_f32(__inactive, __a, __b, __p)
#define vcaddq_rot270_m_f16(__inactive, __a, __b, __p) __arm_vcaddq_rot270_m_f16(__inactive, __a, __b, __p)
#define vcaddq_rot90_m_f32(__inactive, __a, __b, __p) __arm_vcaddq_rot90_m_f32(__inactive, __a, __b, __p)
#define vcaddq_rot90_m_f16(__inactive, __a, __b, __p) __arm_vcaddq_rot90_m_f16(__inactive, __a, __b, __p)
#define vcmlaq_m_f32(__a, __b, __c, __p) __arm_vcmlaq_m_f32(__a, __b, __c, __p)
#define vcmlaq_m_f16(__a, __b, __c, __p) __arm_vcmlaq_m_f16(__a, __b, __c, __p)
#define vcmlaq_rot180_m_f32(__a, __b, __c, __p) __arm_vcmlaq_rot180_m_f32(__a, __b, __c, __p)
#define vcmlaq_rot180_m_f16(__a, __b, __c, __p) __arm_vcmlaq_rot180_m_f16(__a, __b, __c, __p)
#define vcmlaq_rot270_m_f32(__a, __b, __c, __p) __arm_vcmlaq_rot270_m_f32(__a, __b, __c, __p)
#define vcmlaq_rot270_m_f16(__a, __b, __c, __p) __arm_vcmlaq_rot270_m_f16(__a, __b, __c, __p)
#define vcmlaq_rot90_m_f32(__a, __b, __c, __p) __arm_vcmlaq_rot90_m_f32(__a, __b, __c, __p)
#define vcmlaq_rot90_m_f16(__a, __b, __c, __p) __arm_vcmlaq_rot90_m_f16(__a, __b, __c, __p)
#define vcmulq_m_f32(__inactive, __a, __b, __p) __arm_vcmulq_m_f32(__inactive, __a, __b, __p)
#define vcmulq_m_f16(__inactive, __a, __b, __p) __arm_vcmulq_m_f16(__inactive, __a, __b, __p)
#define vcmulq_rot180_m_f32(__inactive, __a, __b, __p) __arm_vcmulq_rot180_m_f32(__inactive, __a, __b, __p)
#define vcmulq_rot180_m_f16(__inactive, __a, __b, __p) __arm_vcmulq_rot180_m_f16(__inactive, __a, __b, __p)
#define vcmulq_rot270_m_f32(__inactive, __a, __b, __p) __arm_vcmulq_rot270_m_f32(__inactive, __a, __b, __p)
#define vcmulq_rot270_m_f16(__inactive, __a, __b, __p) __arm_vcmulq_rot270_m_f16(__inactive, __a, __b, __p)
#define vcmulq_rot90_m_f32(__inactive, __a, __b, __p) __arm_vcmulq_rot90_m_f32(__inactive, __a, __b, __p)
#define vcmulq_rot90_m_f16(__inactive, __a, __b, __p) __arm_vcmulq_rot90_m_f16(__inactive, __a, __b, __p)
/* Predicated fixed-point conversions; __imm6 is the fractional-bits
   immediate.  */
#define vcvtq_m_n_s32_f32(__inactive, __a, __imm6, __p) __arm_vcvtq_m_n_s32_f32(__inactive, __a, __imm6, __p)
#define vcvtq_m_n_s16_f16(__inactive, __a, __imm6, __p) __arm_vcvtq_m_n_s16_f16(__inactive, __a, __imm6, __p)
#define vcvtq_m_n_u32_f32(__inactive, __a, __imm6, __p) __arm_vcvtq_m_n_u32_f32(__inactive, __a, __imm6, __p)
#define vcvtq_m_n_u16_f16(__inactive, __a, __imm6, __p) __arm_vcvtq_m_n_u16_f16(__inactive, __a, __imm6, __p)
#define veorq_m_f32(__inactive, __a, __b, __p) __arm_veorq_m_f32(__inactive, __a, __b, __p)
#define veorq_m_f16(__inactive, __a, __b, __p) __arm_veorq_m_f16(__inactive, __a, __b, __p)
#define vfmaq_m_f32(__a, __b, __c, __p) __arm_vfmaq_m_f32(__a, __b, __c, __p)
#define vfmaq_m_f16(__a, __b, __c, __p) __arm_vfmaq_m_f16(__a, __b, __c, __p)
#define vfmaq_m_n_f32(__a, __b, __c, __p) __arm_vfmaq_m_n_f32(__a, __b, __c, __p)
#define vfmaq_m_n_f16(__a, __b, __c, __p) __arm_vfmaq_m_n_f16(__a, __b, __c, __p)
#define vfmasq_m_n_f32(__a, __b, __c, __p) __arm_vfmasq_m_n_f32(__a, __b, __c, __p)
#define vfmasq_m_n_f16(__a, __b, __c, __p) __arm_vfmasq_m_n_f16(__a, __b, __c, __p)
#define vfmsq_m_f32(__a, __b, __c, __p) __arm_vfmsq_m_f32(__a, __b, __c, __p)
#define vfmsq_m_f16(__a, __b, __c, __p) __arm_vfmsq_m_f16(__a, __b, __c, __p)
#define vmaxnmq_m_f32(__inactive, __a, __b, __p) __arm_vmaxnmq_m_f32(__inactive, __a, __b, __p)
#define vmaxnmq_m_f16(__inactive, __a, __b, __p) __arm_vmaxnmq_m_f16(__inactive, __a, __b, __p)
#define vminnmq_m_f32(__inactive, __a, __b, __p) __arm_vminnmq_m_f32(__inactive, __a, __b, __p)
#define vminnmq_m_f16(__inactive, __a, __b, __p) __arm_vminnmq_m_f16(__inactive, __a, __b, __p)
#define vmulq_m_f32(__inactive, __a, __b, __p) __arm_vmulq_m_f32(__inactive, __a, __b, __p)
#define vmulq_m_f16(__inactive, __a, __b, __p) __arm_vmulq_m_f16(__inactive, __a, __b, __p)
#define vmulq_m_n_f32(__inactive, __a, __b, __p) __arm_vmulq_m_n_f32(__inactive, __a, __b, __p)
#define vmulq_m_n_f16(__inactive, __a, __b, __p) __arm_vmulq_m_n_f16(__inactive, __a, __b, __p)
#define vornq_m_f32(__inactive, __a, __b, __p) __arm_vornq_m_f32(__inactive, __a, __b, __p)
#define vornq_m_f16(__inactive, __a, __b, __p) __arm_vornq_m_f16(__inactive, __a, __b, __p)
#define vorrq_m_f32(__inactive, __a, __b, __p) __arm_vorrq_m_f32(__inactive, __a, __b, __p)
#define vorrq_m_f16(__inactive, __a, __b, __p) __arm_vorrq_m_f16(__inactive, __a, __b, __p)
#define vsubq_m_f32(__inactive, __a, __b, __p) __arm_vsubq_m_f32(__inactive, __a, __b, __p)
#define vsubq_m_f16(__inactive, __a, __b, __p) __arm_vsubq_m_f16(__inactive, __a, __b, __p)
#define vsubq_m_n_f32(__inactive, __a, __b, __p) __arm_vsubq_m_n_f32(__inactive, __a, __b, __p)
#define vsubq_m_n_f16(__inactive, __a, __b, __p) __arm_vsubq_m_n_f16(__inactive, __a, __b, __p)
/* Byte-granular stores (vstrbq) and loads (vldrbq), plus word
   scatter/gather via a vector base (vstrwq/vldrwq _gather_base /
   _scatter_base).  Naming scheme, visible in the parameter lists:
     _scatter_offset / _gather_offset : per-lane offsets in __offset;
     _p (stores) and _z (loads)       : forms taking a predicate __p.
   All macros forward to the typed __arm_* implementation.  */
#define vstrbq_s8( __addr, __value) __arm_vstrbq_s8( __addr, __value)
#define vstrbq_u8( __addr, __value) __arm_vstrbq_u8( __addr, __value)
#define vstrbq_u16( __addr, __value) __arm_vstrbq_u16( __addr, __value)
#define vstrbq_scatter_offset_s8( __base, __offset, __value) __arm_vstrbq_scatter_offset_s8( __base, __offset, __value)
#define vstrbq_scatter_offset_u8( __base, __offset, __value) __arm_vstrbq_scatter_offset_u8( __base, __offset, __value)
#define vstrbq_scatter_offset_u16( __base, __offset, __value) __arm_vstrbq_scatter_offset_u16( __base, __offset, __value)
#define vstrbq_s16( __addr, __value) __arm_vstrbq_s16( __addr, __value)
#define vstrbq_u32( __addr, __value) __arm_vstrbq_u32( __addr, __value)
#define vstrbq_scatter_offset_s16( __base, __offset, __value) __arm_vstrbq_scatter_offset_s16( __base, __offset, __value)
#define vstrbq_scatter_offset_u32( __base, __offset, __value) __arm_vstrbq_scatter_offset_u32( __base, __offset, __value)
#define vstrbq_s32( __addr, __value) __arm_vstrbq_s32( __addr, __value)
#define vstrbq_scatter_offset_s32( __base, __offset, __value) __arm_vstrbq_scatter_offset_s32( __base, __offset, __value)
#define vstrwq_scatter_base_s32(__addr, __offset, __value) __arm_vstrwq_scatter_base_s32(__addr, __offset, __value)
#define vstrwq_scatter_base_u32(__addr, __offset, __value) __arm_vstrwq_scatter_base_u32(__addr, __offset, __value)
#define vldrbq_gather_offset_u8(__base, __offset) __arm_vldrbq_gather_offset_u8(__base, __offset)
#define vldrbq_gather_offset_s8(__base, __offset) __arm_vldrbq_gather_offset_s8(__base, __offset)
#define vldrbq_s8(__base) __arm_vldrbq_s8(__base)
#define vldrbq_u8(__base) __arm_vldrbq_u8(__base)
#define vldrbq_gather_offset_u16(__base, __offset) __arm_vldrbq_gather_offset_u16(__base, __offset)
#define vldrbq_gather_offset_s16(__base, __offset) __arm_vldrbq_gather_offset_s16(__base, __offset)
#define vldrbq_s16(__base) __arm_vldrbq_s16(__base)
#define vldrbq_u16(__base) __arm_vldrbq_u16(__base)
#define vldrbq_gather_offset_u32(__base, __offset) __arm_vldrbq_gather_offset_u32(__base, __offset)
#define vldrbq_gather_offset_s32(__base, __offset) __arm_vldrbq_gather_offset_s32(__base, __offset)
#define vldrbq_s32(__base) __arm_vldrbq_s32(__base)
#define vldrbq_u32(__base) __arm_vldrbq_u32(__base)
#define vldrwq_gather_base_s32(__addr, __offset) __arm_vldrwq_gather_base_s32(__addr, __offset)
#define vldrwq_gather_base_u32(__addr, __offset) __arm_vldrwq_gather_base_u32(__addr, __offset)
/* Predicated store (_p) and load (_z) forms.  */
#define vstrbq_p_s8( __addr, __value, __p) __arm_vstrbq_p_s8( __addr, __value, __p)
#define vstrbq_p_s32( __addr, __value, __p) __arm_vstrbq_p_s32( __addr, __value, __p)
#define vstrbq_p_s16( __addr, __value, __p) __arm_vstrbq_p_s16( __addr, __value, __p)
#define vstrbq_p_u8( __addr, __value, __p) __arm_vstrbq_p_u8( __addr, __value, __p)
#define vstrbq_p_u32( __addr, __value, __p) __arm_vstrbq_p_u32( __addr, __value, __p)
#define vstrbq_p_u16( __addr, __value, __p) __arm_vstrbq_p_u16( __addr, __value, __p)
#define vstrbq_scatter_offset_p_s8( __base, __offset, __value, __p) __arm_vstrbq_scatter_offset_p_s8( __base, __offset, __value, __p)
#define vstrbq_scatter_offset_p_s32( __base, __offset, __value, __p) __arm_vstrbq_scatter_offset_p_s32( __base, __offset, __value, __p)
#define vstrbq_scatter_offset_p_s16( __base, __offset, __value, __p) __arm_vstrbq_scatter_offset_p_s16( __base, __offset, __value, __p)
#define vstrbq_scatter_offset_p_u8( __base, __offset, __value, __p) __arm_vstrbq_scatter_offset_p_u8( __base, __offset, __value, __p)
#define vstrbq_scatter_offset_p_u32( __base, __offset, __value, __p) __arm_vstrbq_scatter_offset_p_u32( __base, __offset, __value, __p)
#define vstrbq_scatter_offset_p_u16( __base, __offset, __value, __p) __arm_vstrbq_scatter_offset_p_u16( __base, __offset, __value, __p)
#define vstrwq_scatter_base_p_s32(__addr, __offset, __value, __p) __arm_vstrwq_scatter_base_p_s32(__addr, __offset, __value, __p)
#define vstrwq_scatter_base_p_u32(__addr, __offset, __value, __p) __arm_vstrwq_scatter_base_p_u32(__addr, __offset, __value, __p)
#define vldrbq_gather_offset_z_s16(__base, __offset, __p) __arm_vldrbq_gather_offset_z_s16(__base, __offset, __p)
#define vldrbq_gather_offset_z_u8(__base, __offset, __p) __arm_vldrbq_gather_offset_z_u8(__base, __offset, __p)
#define vldrbq_gather_offset_z_s32(__base, __offset, __p) __arm_vldrbq_gather_offset_z_s32(__base, __offset, __p)
#define vldrbq_gather_offset_z_u16(__base, __offset, __p) __arm_vldrbq_gather_offset_z_u16(__base, __offset, __p)
#define vldrbq_gather_offset_z_u32(__base, __offset, __p) __arm_vldrbq_gather_offset_z_u32(__base, __offset, __p)
#define vldrbq_gather_offset_z_s8(__base, __offset, __p) __arm_vldrbq_gather_offset_z_s8(__base, __offset, __p)
#define vldrbq_z_s16(__base, __p) __arm_vldrbq_z_s16(__base, __p)
#define vldrbq_z_u8(__base, __p) __arm_vldrbq_z_u8(__base, __p)
#define vldrbq_z_s8(__base, __p) __arm_vldrbq_z_s8(__base, __p)
#define vldrbq_z_s32(__base, __p) __arm_vldrbq_z_s32(__base, __p)
#define vldrbq_z_u16(__base, __p) __arm_vldrbq_z_u16(__base, __p)
#define vldrbq_z_u32(__base, __p) __arm_vldrbq_z_u32(__base, __p)
#define vldrwq_gather_base_z_u32(__addr, __offset, __p) __arm_vldrwq_gather_base_z_u32(__addr, __offset, __p)
#define vldrwq_gather_base_z_s32(__addr, __offset, __p) __arm_vldrwq_gather_base_z_s32(__addr, __offset, __p)
/* Whole-vector contiguous loads (vld1q) and halfword/word loads
   (vldrhq/vldrwq), including _gather_offset (byte offsets),
   _gather_shifted_offset (element-scaled offsets, per the name) and
   _z predicated forms, for integer and f16/f32 element types.  */
#define vld1q_s8(__base) __arm_vld1q_s8(__base)
#define vld1q_s32(__base) __arm_vld1q_s32(__base)
#define vld1q_s16(__base) __arm_vld1q_s16(__base)
#define vld1q_u8(__base) __arm_vld1q_u8(__base)
#define vld1q_u32(__base) __arm_vld1q_u32(__base)
#define vld1q_u16(__base) __arm_vld1q_u16(__base)
#define vldrhq_gather_offset_s32(__base, __offset) __arm_vldrhq_gather_offset_s32(__base, __offset)
#define vldrhq_gather_offset_s16(__base, __offset) __arm_vldrhq_gather_offset_s16(__base, __offset)
#define vldrhq_gather_offset_u32(__base, __offset) __arm_vldrhq_gather_offset_u32(__base, __offset)
#define vldrhq_gather_offset_u16(__base, __offset) __arm_vldrhq_gather_offset_u16(__base, __offset)
#define vldrhq_gather_offset_z_s32(__base, __offset, __p) __arm_vldrhq_gather_offset_z_s32(__base, __offset, __p)
#define vldrhq_gather_offset_z_s16(__base, __offset, __p) __arm_vldrhq_gather_offset_z_s16(__base, __offset, __p)
#define vldrhq_gather_offset_z_u32(__base, __offset, __p) __arm_vldrhq_gather_offset_z_u32(__base, __offset, __p)
#define vldrhq_gather_offset_z_u16(__base, __offset, __p) __arm_vldrhq_gather_offset_z_u16(__base, __offset, __p)
#define vldrhq_gather_shifted_offset_s32(__base, __offset) __arm_vldrhq_gather_shifted_offset_s32(__base, __offset)
#define vldrhq_gather_shifted_offset_s16(__base, __offset) __arm_vldrhq_gather_shifted_offset_s16(__base, __offset)
#define vldrhq_gather_shifted_offset_u32(__base, __offset) __arm_vldrhq_gather_shifted_offset_u32(__base, __offset)
#define vldrhq_gather_shifted_offset_u16(__base, __offset) __arm_vldrhq_gather_shifted_offset_u16(__base, __offset)
#define vldrhq_gather_shifted_offset_z_s32(__base, __offset, __p) __arm_vldrhq_gather_shifted_offset_z_s32(__base, __offset, __p)
#define vldrhq_gather_shifted_offset_z_s16(__base, __offset, __p) __arm_vldrhq_gather_shifted_offset_z_s16(__base, __offset, __p)
#define vldrhq_gather_shifted_offset_z_u32(__base, __offset, __p) __arm_vldrhq_gather_shifted_offset_z_u32(__base, __offset, __p)
#define vldrhq_gather_shifted_offset_z_u16(__base, __offset, __p) __arm_vldrhq_gather_shifted_offset_z_u16(__base, __offset, __p)
#define vldrhq_s32(__base) __arm_vldrhq_s32(__base)
#define vldrhq_s16(__base) __arm_vldrhq_s16(__base)
#define vldrhq_u32(__base) __arm_vldrhq_u32(__base)
#define vldrhq_u16(__base) __arm_vldrhq_u16(__base)
#define vldrhq_z_s32(__base, __p) __arm_vldrhq_z_s32(__base, __p)
#define vldrhq_z_s16(__base, __p) __arm_vldrhq_z_s16(__base, __p)
#define vldrhq_z_u32(__base, __p) __arm_vldrhq_z_u32(__base, __p)
#define vldrhq_z_u16(__base, __p) __arm_vldrhq_z_u16(__base, __p)
#define vldrwq_s32(__base) __arm_vldrwq_s32(__base)
#define vldrwq_u32(__base) __arm_vldrwq_u32(__base)
#define vldrwq_z_s32(__base, __p) __arm_vldrwq_z_s32(__base, __p)
#define vldrwq_z_u32(__base, __p) __arm_vldrwq_z_u32(__base, __p)
/* Floating-point contiguous loads.  */
#define vld1q_f32(__base) __arm_vld1q_f32(__base)
#define vld1q_f16(__base) __arm_vld1q_f16(__base)
#define vldrhq_f16(__base) __arm_vldrhq_f16(__base)
#define vldrhq_z_f16(__base, __p) __arm_vldrhq_z_f16(__base, __p)
#define vldrwq_f32(__base) __arm_vldrwq_f32(__base)
#define vldrwq_z_f32(__base, __p) __arm_vldrwq_z_f32(__base, __p)
/* Doubleword (64-bit) gather loads, and f16/f32 gather loads via
   vector base, per-lane offset, or shifted (element-scaled) offset;
   _z forms take the predicate __p.  */
#define vldrdq_gather_base_s64(__addr, __offset) __arm_vldrdq_gather_base_s64(__addr, __offset)
#define vldrdq_gather_base_u64(__addr, __offset) __arm_vldrdq_gather_base_u64(__addr, __offset)
#define vldrdq_gather_base_z_s64(__addr, __offset, __p) __arm_vldrdq_gather_base_z_s64(__addr, __offset, __p)
#define vldrdq_gather_base_z_u64(__addr, __offset, __p) __arm_vldrdq_gather_base_z_u64(__addr, __offset, __p)
#define vldrdq_gather_offset_s64(__base, __offset) __arm_vldrdq_gather_offset_s64(__base, __offset)
#define vldrdq_gather_offset_u64(__base, __offset) __arm_vldrdq_gather_offset_u64(__base, __offset)
#define vldrdq_gather_offset_z_s64(__base, __offset, __p) __arm_vldrdq_gather_offset_z_s64(__base, __offset, __p)
#define vldrdq_gather_offset_z_u64(__base, __offset, __p) __arm_vldrdq_gather_offset_z_u64(__base, __offset, __p)
#define vldrdq_gather_shifted_offset_s64(__base, __offset) __arm_vldrdq_gather_shifted_offset_s64(__base, __offset)
#define vldrdq_gather_shifted_offset_u64(__base, __offset) __arm_vldrdq_gather_shifted_offset_u64(__base, __offset)
#define vldrdq_gather_shifted_offset_z_s64(__base, __offset, __p) __arm_vldrdq_gather_shifted_offset_z_s64(__base, __offset, __p)
#define vldrdq_gather_shifted_offset_z_u64(__base, __offset, __p) __arm_vldrdq_gather_shifted_offset_z_u64(__base, __offset, __p)
#define vldrhq_gather_offset_f16(__base, __offset) __arm_vldrhq_gather_offset_f16(__base, __offset)
#define vldrhq_gather_offset_z_f16(__base, __offset, __p) __arm_vldrhq_gather_offset_z_f16(__base, __offset, __p)
#define vldrhq_gather_shifted_offset_f16(__base, __offset) __arm_vldrhq_gather_shifted_offset_f16(__base, __offset)
#define vldrhq_gather_shifted_offset_z_f16(__base, __offset, __p) __arm_vldrhq_gather_shifted_offset_z_f16(__base, __offset, __p)
#define vldrwq_gather_base_f32(__addr, __offset) __arm_vldrwq_gather_base_f32(__addr, __offset)
#define vldrwq_gather_base_z_f32(__addr, __offset, __p) __arm_vldrwq_gather_base_z_f32(__addr, __offset, __p)
#define vldrwq_gather_offset_f32(__base, __offset) __arm_vldrwq_gather_offset_f32(__base, __offset)
#define vldrwq_gather_offset_s32(__base, __offset) __arm_vldrwq_gather_offset_s32(__base, __offset)
#define vldrwq_gather_offset_u32(__base, __offset) __arm_vldrwq_gather_offset_u32(__base, __offset)
#define vldrwq_gather_offset_z_f32(__base, __offset, __p) __arm_vldrwq_gather_offset_z_f32(__base, __offset, __p)
#define vldrwq_gather_offset_z_s32(__base, __offset, __p) __arm_vldrwq_gather_offset_z_s32(__base, __offset, __p)
#define vldrwq_gather_offset_z_u32(__base, __offset, __p) __arm_vldrwq_gather_offset_z_u32(__base, __offset, __p)
#define vldrwq_gather_shifted_offset_f32(__base, __offset) __arm_vldrwq_gather_shifted_offset_f32(__base, __offset)
#define vldrwq_gather_shifted_offset_s32(__base, __offset) __arm_vldrwq_gather_shifted_offset_s32(__base, __offset)
#define vldrwq_gather_shifted_offset_u32(__base, __offset) __arm_vldrwq_gather_shifted_offset_u32(__base, __offset)
#define vldrwq_gather_shifted_offset_z_f32(__base, __offset, __p) __arm_vldrwq_gather_shifted_offset_z_f32(__base, __offset, __p)
#define vldrwq_gather_shifted_offset_z_s32(__base, __offset, __p) __arm_vldrwq_gather_shifted_offset_z_s32(__base, __offset, __p)
#define vldrwq_gather_shifted_offset_z_u32(__base, __offset, __p) __arm_vldrwq_gather_shifted_offset_z_u32(__base, __offset, __p)
/* Whole-vector stores (vst1q) and halfword/word/doubleword stores
   (vstrhq/vstrwq/vstrdq), including scatter variants (_scatter_base,
   _scatter_offset, _scatter_shifted_offset) and _p predicated forms,
   for integer and f16/f32 element types.  */
#define vst1q_f32(__addr, __value) __arm_vst1q_f32(__addr, __value)
#define vst1q_f16(__addr, __value) __arm_vst1q_f16(__addr, __value)
#define vst1q_s8(__addr, __value) __arm_vst1q_s8(__addr, __value)
#define vst1q_s32(__addr, __value) __arm_vst1q_s32(__addr, __value)
#define vst1q_s16(__addr, __value) __arm_vst1q_s16(__addr, __value)
#define vst1q_u8(__addr, __value) __arm_vst1q_u8(__addr, __value)
#define vst1q_u32(__addr, __value) __arm_vst1q_u32(__addr, __value)
#define vst1q_u16(__addr, __value) __arm_vst1q_u16(__addr, __value)
#define vstrhq_f16(__addr, __value) __arm_vstrhq_f16(__addr, __value)
#define vstrhq_scatter_offset_s32( __base, __offset, __value) __arm_vstrhq_scatter_offset_s32( __base, __offset, __value)
#define vstrhq_scatter_offset_s16( __base, __offset, __value) __arm_vstrhq_scatter_offset_s16( __base, __offset, __value)
#define vstrhq_scatter_offset_u32( __base, __offset, __value) __arm_vstrhq_scatter_offset_u32( __base, __offset, __value)
#define vstrhq_scatter_offset_u16( __base, __offset, __value) __arm_vstrhq_scatter_offset_u16( __base, __offset, __value)
#define vstrhq_scatter_offset_p_s32( __base, __offset, __value, __p) __arm_vstrhq_scatter_offset_p_s32( __base, __offset, __value, __p)
#define vstrhq_scatter_offset_p_s16( __base, __offset, __value, __p) __arm_vstrhq_scatter_offset_p_s16( __base, __offset, __value, __p)
#define vstrhq_scatter_offset_p_u32( __base, __offset, __value, __p) __arm_vstrhq_scatter_offset_p_u32( __base, __offset, __value, __p)
#define vstrhq_scatter_offset_p_u16( __base, __offset, __value, __p) __arm_vstrhq_scatter_offset_p_u16( __base, __offset, __value, __p)
#define vstrhq_scatter_shifted_offset_s32( __base, __offset, __value) __arm_vstrhq_scatter_shifted_offset_s32( __base, __offset, __value)
#define vstrhq_scatter_shifted_offset_s16( __base, __offset, __value) __arm_vstrhq_scatter_shifted_offset_s16( __base, __offset, __value)
#define vstrhq_scatter_shifted_offset_u32( __base, __offset, __value) __arm_vstrhq_scatter_shifted_offset_u32( __base, __offset, __value)
#define vstrhq_scatter_shifted_offset_u16( __base, __offset, __value) __arm_vstrhq_scatter_shifted_offset_u16( __base, __offset, __value)
#define vstrhq_scatter_shifted_offset_p_s32( __base, __offset, __value, __p) __arm_vstrhq_scatter_shifted_offset_p_s32( __base, __offset, __value, __p)
#define vstrhq_scatter_shifted_offset_p_s16( __base, __offset, __value, __p) __arm_vstrhq_scatter_shifted_offset_p_s16( __base, __offset, __value, __p)
#define vstrhq_scatter_shifted_offset_p_u32( __base, __offset, __value, __p) __arm_vstrhq_scatter_shifted_offset_p_u32( __base, __offset, __value, __p)
#define vstrhq_scatter_shifted_offset_p_u16( __base, __offset, __value, __p) __arm_vstrhq_scatter_shifted_offset_p_u16( __base, __offset, __value, __p)
#define vstrhq_s32(__addr, __value) __arm_vstrhq_s32(__addr, __value)
#define vstrhq_s16(__addr, __value) __arm_vstrhq_s16(__addr, __value)
#define vstrhq_u32(__addr, __value) __arm_vstrhq_u32(__addr, __value)
#define vstrhq_u16(__addr, __value) __arm_vstrhq_u16(__addr, __value)
#define vstrhq_p_f16(__addr, __value, __p) __arm_vstrhq_p_f16(__addr, __value, __p)
#define vstrhq_p_s32(__addr, __value, __p) __arm_vstrhq_p_s32(__addr, __value, __p)
#define vstrhq_p_s16(__addr, __value, __p) __arm_vstrhq_p_s16(__addr, __value, __p)
#define vstrhq_p_u32(__addr, __value, __p) __arm_vstrhq_p_u32(__addr, __value, __p)
#define vstrhq_p_u16(__addr, __value, __p) __arm_vstrhq_p_u16(__addr, __value, __p)
#define vstrwq_f32(__addr, __value) __arm_vstrwq_f32(__addr, __value)
#define vstrwq_s32(__addr, __value) __arm_vstrwq_s32(__addr, __value)
#define vstrwq_u32(__addr, __value) __arm_vstrwq_u32(__addr, __value)
#define vstrwq_p_f32(__addr, __value, __p) __arm_vstrwq_p_f32(__addr, __value, __p)
#define vstrwq_p_s32(__addr, __value, __p) __arm_vstrwq_p_s32(__addr, __value, __p)
#define vstrwq_p_u32(__addr, __value, __p) __arm_vstrwq_p_u32(__addr, __value, __p)
/* 64-bit scatter stores.  */
#define vstrdq_scatter_base_p_s64(__addr, __offset, __value, __p) __arm_vstrdq_scatter_base_p_s64(__addr, __offset, __value, __p)
#define vstrdq_scatter_base_p_u64(__addr, __offset, __value, __p) __arm_vstrdq_scatter_base_p_u64(__addr, __offset, __value, __p)
#define vstrdq_scatter_base_s64(__addr, __offset, __value) __arm_vstrdq_scatter_base_s64(__addr, __offset, __value)
#define vstrdq_scatter_base_u64(__addr, __offset, __value) __arm_vstrdq_scatter_base_u64(__addr, __offset, __value)
#define vstrdq_scatter_offset_p_s64(__base, __offset, __value, __p) __arm_vstrdq_scatter_offset_p_s64(__base, __offset, __value, __p)
#define vstrdq_scatter_offset_p_u64(__base, __offset, __value, __p) __arm_vstrdq_scatter_offset_p_u64(__base, __offset, __value, __p)
#define vstrdq_scatter_offset_s64(__base, __offset, __value) __arm_vstrdq_scatter_offset_s64(__base, __offset, __value)
#define vstrdq_scatter_offset_u64(__base, __offset, __value) __arm_vstrdq_scatter_offset_u64(__base, __offset, __value)
#define vstrdq_scatter_shifted_offset_p_s64(__base, __offset, __value, __p) __arm_vstrdq_scatter_shifted_offset_p_s64(__base, __offset, __value, __p)
#define vstrdq_scatter_shifted_offset_p_u64(__base, __offset, __value, __p) __arm_vstrdq_scatter_shifted_offset_p_u64(__base, __offset, __value, __p)
#define vstrdq_scatter_shifted_offset_s64(__base, __offset, __value) __arm_vstrdq_scatter_shifted_offset_s64(__base, __offset, __value)
#define vstrdq_scatter_shifted_offset_u64(__base, __offset, __value) __arm_vstrdq_scatter_shifted_offset_u64(__base, __offset, __value)
/* Floating-point scatter stores.  */
#define vstrhq_scatter_offset_f16(__base, __offset, __value) __arm_vstrhq_scatter_offset_f16(__base, __offset, __value)
#define vstrhq_scatter_offset_p_f16(__base, __offset, __value, __p) __arm_vstrhq_scatter_offset_p_f16(__base, __offset, __value, __p)
#define vstrhq_scatter_shifted_offset_f16(__base, __offset, __value) __arm_vstrhq_scatter_shifted_offset_f16(__base, __offset, __value)
#define vstrhq_scatter_shifted_offset_p_f16(__base, __offset, __value, __p) __arm_vstrhq_scatter_shifted_offset_p_f16(__base, __offset, __value, __p)
#define vstrwq_scatter_base_f32(__addr, __offset, __value) __arm_vstrwq_scatter_base_f32(__addr, __offset, __value)
#define vstrwq_scatter_base_p_f32(__addr, __offset, __value, __p) __arm_vstrwq_scatter_base_p_f32(__addr, __offset, __value, __p)
#define vstrwq_scatter_offset_f32(__base, __offset, __value) __arm_vstrwq_scatter_offset_f32(__base, __offset, __value)
#define vstrwq_scatter_offset_p_f32(__base, __offset, __value, __p) __arm_vstrwq_scatter_offset_p_f32(__base, __offset, __value, __p)
#define vstrwq_scatter_offset_p_s32(__base, __offset, __value, __p) __arm_vstrwq_scatter_offset_p_s32(__base, __offset, __value, __p)
#define vstrwq_scatter_offset_p_u32(__base, __offset, __value, __p) __arm_vstrwq_scatter_offset_p_u32(__base, __offset, __value, __p)
#define vstrwq_scatter_offset_s32(__base, __offset, __value) __arm_vstrwq_scatter_offset_s32(__base, __offset, __value)
#define vstrwq_scatter_offset_u32(__base, __offset, __value) __arm_vstrwq_scatter_offset_u32(__base, __offset, __value)
#define vstrwq_scatter_shifted_offset_f32(__base, __offset, __value) __arm_vstrwq_scatter_shifted_offset_f32(__base, __offset, __value)
#define vstrwq_scatter_shifted_offset_p_f32(__base, __offset, __value, __p) __arm_vstrwq_scatter_shifted_offset_p_f32(__base, __offset, __value, __p)
#define vstrwq_scatter_shifted_offset_p_s32(__base, __offset, __value, __p) __arm_vstrwq_scatter_shifted_offset_p_s32(__base, __offset, __value, __p)
#define vstrwq_scatter_shifted_offset_p_u32(__base, __offset, __value, __p) __arm_vstrwq_scatter_shifted_offset_p_u32(__base, __offset, __value, __p)
#define vstrwq_scatter_shifted_offset_s32(__base, __offset, __value) __arm_vstrwq_scatter_shifted_offset_s32(__base, __offset, __value)
#define vstrwq_scatter_shifted_offset_u32(__base, __offset, __value) __arm_vstrwq_scatter_shifted_offset_u32(__base, __offset, __value)
/* Per-type vaddq aliases (integer s8..u32 plus f16/f32), forwarded to
   the typed __arm_* implementations.  */
#define vaddq_s8(__a, __b) __arm_vaddq_s8(__a, __b)
#define vaddq_s16(__a, __b) __arm_vaddq_s16(__a, __b)
#define vaddq_s32(__a, __b) __arm_vaddq_s32(__a, __b)
#define vaddq_u8(__a, __b) __arm_vaddq_u8(__a, __b)
#define vaddq_u16(__a, __b) __arm_vaddq_u16(__a, __b)
#define vaddq_u32(__a, __b) __arm_vaddq_u32(__a, __b)
#define vaddq_f16(__a, __b) __arm_vaddq_f16(__a, __b)
#define vaddq_f32(__a, __b) __arm_vaddq_f32(__a, __b)
/* vreinterpretq_<dst>_<src> reinterpretation casts between MVE vector
   types; the first type suffix is the destination element type, the
   second the source.  Integer<->integer pairs come first, then the
   f16/f32 <-> integer and float<->float pairs.  */
#define vreinterpretq_s16_s32(__a) __arm_vreinterpretq_s16_s32(__a)
#define vreinterpretq_s16_s64(__a) __arm_vreinterpretq_s16_s64(__a)
#define vreinterpretq_s16_s8(__a) __arm_vreinterpretq_s16_s8(__a)
#define vreinterpretq_s16_u16(__a) __arm_vreinterpretq_s16_u16(__a)
#define vreinterpretq_s16_u32(__a) __arm_vreinterpretq_s16_u32(__a)
#define vreinterpretq_s16_u64(__a) __arm_vreinterpretq_s16_u64(__a)
#define vreinterpretq_s16_u8(__a) __arm_vreinterpretq_s16_u8(__a)
#define vreinterpretq_s32_s16(__a) __arm_vreinterpretq_s32_s16(__a)
#define vreinterpretq_s32_s64(__a) __arm_vreinterpretq_s32_s64(__a)
#define vreinterpretq_s32_s8(__a) __arm_vreinterpretq_s32_s8(__a)
#define vreinterpretq_s32_u16(__a) __arm_vreinterpretq_s32_u16(__a)
#define vreinterpretq_s32_u32(__a) __arm_vreinterpretq_s32_u32(__a)
#define vreinterpretq_s32_u64(__a) __arm_vreinterpretq_s32_u64(__a)
#define vreinterpretq_s32_u8(__a) __arm_vreinterpretq_s32_u8(__a)
#define vreinterpretq_s64_s16(__a) __arm_vreinterpretq_s64_s16(__a)
#define vreinterpretq_s64_s32(__a) __arm_vreinterpretq_s64_s32(__a)
#define vreinterpretq_s64_s8(__a) __arm_vreinterpretq_s64_s8(__a)
#define vreinterpretq_s64_u16(__a) __arm_vreinterpretq_s64_u16(__a)
#define vreinterpretq_s64_u32(__a) __arm_vreinterpretq_s64_u32(__a)
#define vreinterpretq_s64_u64(__a) __arm_vreinterpretq_s64_u64(__a)
#define vreinterpretq_s64_u8(__a) __arm_vreinterpretq_s64_u8(__a)
#define vreinterpretq_s8_s16(__a) __arm_vreinterpretq_s8_s16(__a)
#define vreinterpretq_s8_s32(__a) __arm_vreinterpretq_s8_s32(__a)
#define vreinterpretq_s8_s64(__a) __arm_vreinterpretq_s8_s64(__a)
#define vreinterpretq_s8_u16(__a) __arm_vreinterpretq_s8_u16(__a)
#define vreinterpretq_s8_u32(__a) __arm_vreinterpretq_s8_u32(__a)
#define vreinterpretq_s8_u64(__a) __arm_vreinterpretq_s8_u64(__a)
#define vreinterpretq_s8_u8(__a) __arm_vreinterpretq_s8_u8(__a)
#define vreinterpretq_u16_s16(__a) __arm_vreinterpretq_u16_s16(__a)
#define vreinterpretq_u16_s32(__a) __arm_vreinterpretq_u16_s32(__a)
#define vreinterpretq_u16_s64(__a) __arm_vreinterpretq_u16_s64(__a)
#define vreinterpretq_u16_s8(__a) __arm_vreinterpretq_u16_s8(__a)
#define vreinterpretq_u16_u32(__a) __arm_vreinterpretq_u16_u32(__a)
#define vreinterpretq_u16_u64(__a) __arm_vreinterpretq_u16_u64(__a)
#define vreinterpretq_u16_u8(__a) __arm_vreinterpretq_u16_u8(__a)
#define vreinterpretq_u32_s16(__a) __arm_vreinterpretq_u32_s16(__a)
#define vreinterpretq_u32_s32(__a) __arm_vreinterpretq_u32_s32(__a)
#define vreinterpretq_u32_s64(__a) __arm_vreinterpretq_u32_s64(__a)
#define vreinterpretq_u32_s8(__a) __arm_vreinterpretq_u32_s8(__a)
#define vreinterpretq_u32_u16(__a) __arm_vreinterpretq_u32_u16(__a)
#define vreinterpretq_u32_u64(__a) __arm_vreinterpretq_u32_u64(__a)
#define vreinterpretq_u32_u8(__a) __arm_vreinterpretq_u32_u8(__a)
#define vreinterpretq_u64_s16(__a) __arm_vreinterpretq_u64_s16(__a)
#define vreinterpretq_u64_s32(__a) __arm_vreinterpretq_u64_s32(__a)
#define vreinterpretq_u64_s64(__a) __arm_vreinterpretq_u64_s64(__a)
#define vreinterpretq_u64_s8(__a) __arm_vreinterpretq_u64_s8(__a)
#define vreinterpretq_u64_u16(__a) __arm_vreinterpretq_u64_u16(__a)
#define vreinterpretq_u64_u32(__a) __arm_vreinterpretq_u64_u32(__a)
#define vreinterpretq_u64_u8(__a) __arm_vreinterpretq_u64_u8(__a)
#define vreinterpretq_u8_s16(__a) __arm_vreinterpretq_u8_s16(__a)
#define vreinterpretq_u8_s32(__a) __arm_vreinterpretq_u8_s32(__a)
#define vreinterpretq_u8_s64(__a) __arm_vreinterpretq_u8_s64(__a)
#define vreinterpretq_u8_s8(__a) __arm_vreinterpretq_u8_s8(__a)
#define vreinterpretq_u8_u16(__a) __arm_vreinterpretq_u8_u16(__a)
#define vreinterpretq_u8_u32(__a) __arm_vreinterpretq_u8_u32(__a)
#define vreinterpretq_u8_u64(__a) __arm_vreinterpretq_u8_u64(__a)
/* Floating-point reinterpretations.  */
#define vreinterpretq_s32_f16(__a) __arm_vreinterpretq_s32_f16(__a)
#define vreinterpretq_s32_f32(__a) __arm_vreinterpretq_s32_f32(__a)
#define vreinterpretq_u16_f16(__a) __arm_vreinterpretq_u16_f16(__a)
#define vreinterpretq_u16_f32(__a) __arm_vreinterpretq_u16_f32(__a)
#define vreinterpretq_u32_f16(__a) __arm_vreinterpretq_u32_f16(__a)
#define vreinterpretq_u32_f32(__a) __arm_vreinterpretq_u32_f32(__a)
#define vreinterpretq_u64_f16(__a) __arm_vreinterpretq_u64_f16(__a)
#define vreinterpretq_u64_f32(__a) __arm_vreinterpretq_u64_f32(__a)
#define vreinterpretq_u8_f16(__a) __arm_vreinterpretq_u8_f16(__a)
#define vreinterpretq_u8_f32(__a) __arm_vreinterpretq_u8_f32(__a)
#define vreinterpretq_f16_f32(__a) __arm_vreinterpretq_f16_f32(__a)
#define vreinterpretq_f16_s16(__a) __arm_vreinterpretq_f16_s16(__a)
#define vreinterpretq_f16_s32(__a) __arm_vreinterpretq_f16_s32(__a)
#define vreinterpretq_f16_s64(__a) __arm_vreinterpretq_f16_s64(__a)
#define vreinterpretq_f16_s8(__a) __arm_vreinterpretq_f16_s8(__a)
#define vreinterpretq_f16_u16(__a) __arm_vreinterpretq_f16_u16(__a)
#define vreinterpretq_f16_u32(__a) __arm_vreinterpretq_f16_u32(__a)
#define vreinterpretq_f16_u64(__a) __arm_vreinterpretq_f16_u64(__a)
#define vreinterpretq_f16_u8(__a) __arm_vreinterpretq_f16_u8(__a)
#define vreinterpretq_f32_f16(__a) __arm_vreinterpretq_f32_f16(__a)
#define vreinterpretq_f32_s16(__a) __arm_vreinterpretq_f32_s16(__a)
#define vreinterpretq_f32_s32(__a) __arm_vreinterpretq_f32_s32(__a)
#define vreinterpretq_f32_s64(__a) __arm_vreinterpretq_f32_s64(__a)
#define vreinterpretq_f32_s8(__a) __arm_vreinterpretq_f32_s8(__a)
#define vreinterpretq_f32_u16(__a) __arm_vreinterpretq_f32_u16(__a)
#define vreinterpretq_f32_u32(__a) __arm_vreinterpretq_f32_u32(__a)
#define vreinterpretq_f32_u64(__a) __arm_vreinterpretq_f32_u64(__a)
#define vreinterpretq_f32_u8(__a) __arm_vreinterpretq_f32_u8(__a)
#define vreinterpretq_s16_f16(__a) __arm_vreinterpretq_s16_f16(__a)
#define vreinterpretq_s16_f32(__a) __arm_vreinterpretq_s16_f32(__a)
#define vreinterpretq_s64_f16(__a) __arm_vreinterpretq_s64_f16(__a)
#define vreinterpretq_s64_f32(__a) __arm_vreinterpretq_s64_f32(__a)
#define vreinterpretq_s8_f16(__a) __arm_vreinterpretq_s8_f16(__a)
#define vreinterpretq_s8_f32(__a) __arm_vreinterpretq_s8_f32(__a)
/* vuninitializedq_<type>: aliases for the "uninitialized vector"
   intrinsics.  NOTE(review): `void` here is used as the name of a macro
   parameter (the preprocessor treats keywords as ordinary identifiers),
   so the intended invocation is with an empty argument list, e.g.
   vuninitializedq_u8().  Unusual but presumably intentional — confirm
   against the __arm_-prefixed definitions elsewhere in this file.  */
#define vuninitializedq_u8(void) __arm_vuninitializedq_u8(void)
#define vuninitializedq_u16(void) __arm_vuninitializedq_u16(void)
#define vuninitializedq_u32(void) __arm_vuninitializedq_u32(void)
#define vuninitializedq_u64(void) __arm_vuninitializedq_u64(void)
#define vuninitializedq_s8(void) __arm_vuninitializedq_s8(void)
#define vuninitializedq_s16(void) __arm_vuninitializedq_s16(void)
#define vuninitializedq_s32(void) __arm_vuninitializedq_s32(void)
#define vuninitializedq_s64(void) __arm_vuninitializedq_s64(void)
#define vuninitializedq_f16(void) __arm_vuninitializedq_f16(void)
#define vuninitializedq_f32(void) __arm_vuninitializedq_f32(void)
/* vddupq (decrementing duplicate) aliases.  Suffix conventions as seen
   from the parameter lists: _m_* take an __inactive vector and a
   predicate __p (merging-predicated); _n_* start from scalar __a; _wb_*
   take __a by pointer for write-back — TODO confirm suffix semantics
   against the __arm_ definitions / ACLE.  __imm is the step immediate.  */
#define vddupq_m_n_u8(__inactive, __a, __imm, __p) __arm_vddupq_m_n_u8(__inactive, __a, __imm, __p)
#define vddupq_m_n_u32(__inactive, __a, __imm, __p) __arm_vddupq_m_n_u32(__inactive, __a, __imm, __p)
#define vddupq_m_n_u16(__inactive, __a, __imm, __p) __arm_vddupq_m_n_u16(__inactive, __a, __imm, __p)
#define vddupq_m_wb_u8(__inactive, __a, __imm, __p) __arm_vddupq_m_wb_u8(__inactive, __a, __imm, __p)
#define vddupq_m_wb_u16(__inactive, __a, __imm, __p) __arm_vddupq_m_wb_u16(__inactive, __a, __imm, __p)
#define vddupq_m_wb_u32(__inactive, __a, __imm, __p) __arm_vddupq_m_wb_u32(__inactive, __a, __imm, __p)
#define vddupq_n_u8(__a, __imm) __arm_vddupq_n_u8(__a, __imm)
#define vddupq_n_u32(__a, __imm) __arm_vddupq_n_u32(__a, __imm)
#define vddupq_n_u16(__a, __imm) __arm_vddupq_n_u16(__a, __imm)
#define vddupq_wb_u8( __a, __imm) __arm_vddupq_wb_u8( __a, __imm)
#define vddupq_wb_u16( __a, __imm) __arm_vddupq_wb_u16( __a, __imm)
#define vddupq_wb_u32( __a, __imm) __arm_vddupq_wb_u32( __a, __imm)
/* vdwdupq (decrementing wrapping duplicate) aliases.  Same suffix
   pattern as vddupq, with an extra scalar __b — presumably the wrap
   limit; confirm against the __arm_ definitions / ACLE.  */
#define vdwdupq_m_n_u8(__inactive, __a, __b, __imm, __p) __arm_vdwdupq_m_n_u8(__inactive, __a, __b, __imm, __p)
#define vdwdupq_m_n_u32(__inactive, __a, __b, __imm, __p) __arm_vdwdupq_m_n_u32(__inactive, __a, __b, __imm, __p)
#define vdwdupq_m_n_u16(__inactive, __a, __b, __imm, __p) __arm_vdwdupq_m_n_u16(__inactive, __a, __b, __imm, __p)
#define vdwdupq_m_wb_u8(__inactive, __a, __b, __imm, __p) __arm_vdwdupq_m_wb_u8(__inactive, __a, __b, __imm, __p)
#define vdwdupq_m_wb_u32(__inactive, __a, __b, __imm, __p) __arm_vdwdupq_m_wb_u32(__inactive, __a, __b, __imm, __p)
#define vdwdupq_m_wb_u16(__inactive, __a, __b, __imm, __p) __arm_vdwdupq_m_wb_u16(__inactive, __a, __b, __imm, __p)
#define vdwdupq_n_u8(__a, __b, __imm) __arm_vdwdupq_n_u8(__a, __b, __imm)
#define vdwdupq_n_u32(__a, __b, __imm) __arm_vdwdupq_n_u32(__a, __b, __imm)
#define vdwdupq_n_u16(__a, __b, __imm) __arm_vdwdupq_n_u16(__a, __b, __imm)
#define vdwdupq_wb_u8( __a, __b, __imm) __arm_vdwdupq_wb_u8( __a, __b, __imm)
#define vdwdupq_wb_u32( __a, __b, __imm) __arm_vdwdupq_wb_u32( __a, __b, __imm)
#define vdwdupq_wb_u16( __a, __b, __imm) __arm_vdwdupq_wb_u16( __a, __b, __imm)
/* vidupq (incrementing duplicate) aliases — mirror of vddupq above,
   with the same _m/_n/_wb suffix pattern.  */
#define vidupq_m_n_u8(__inactive, __a, __imm, __p) __arm_vidupq_m_n_u8(__inactive, __a, __imm, __p)
#define vidupq_m_n_u32(__inactive, __a, __imm, __p) __arm_vidupq_m_n_u32(__inactive, __a, __imm, __p)
#define vidupq_m_n_u16(__inactive, __a, __imm, __p) __arm_vidupq_m_n_u16(__inactive, __a, __imm, __p)
#define vidupq_m_wb_u8(__inactive, __a, __imm, __p) __arm_vidupq_m_wb_u8(__inactive, __a, __imm, __p)
#define vidupq_m_wb_u16(__inactive, __a, __imm, __p) __arm_vidupq_m_wb_u16(__inactive, __a, __imm, __p)
#define vidupq_m_wb_u32(__inactive, __a, __imm, __p) __arm_vidupq_m_wb_u32(__inactive, __a, __imm, __p)
#define vidupq_n_u8(__a, __imm) __arm_vidupq_n_u8(__a, __imm)
#define vidupq_n_u32(__a, __imm) __arm_vidupq_n_u32(__a, __imm)
#define vidupq_n_u16(__a, __imm) __arm_vidupq_n_u16(__a, __imm)
#define vidupq_wb_u8( __a, __imm) __arm_vidupq_wb_u8( __a, __imm)
#define vidupq_wb_u16( __a, __imm) __arm_vidupq_wb_u16( __a, __imm)
#define vidupq_wb_u32( __a, __imm) __arm_vidupq_wb_u32( __a, __imm)
/* viwdupq (incrementing wrapping duplicate) aliases — mirror of vdwdupq
   above, with the same _m/_n/_wb suffix pattern and wrap scalar __b.  */
#define viwdupq_m_n_u8(__inactive, __a, __b, __imm, __p) __arm_viwdupq_m_n_u8(__inactive, __a, __b, __imm, __p)
#define viwdupq_m_n_u32(__inactive, __a, __b, __imm, __p) __arm_viwdupq_m_n_u32(__inactive, __a, __b, __imm, __p)
#define viwdupq_m_n_u16(__inactive, __a, __b, __imm, __p) __arm_viwdupq_m_n_u16(__inactive, __a, __b, __imm, __p)
#define viwdupq_m_wb_u8(__inactive, __a, __b, __imm, __p) __arm_viwdupq_m_wb_u8(__inactive, __a, __b, __imm, __p)
#define viwdupq_m_wb_u32(__inactive, __a, __b, __imm, __p) __arm_viwdupq_m_wb_u32(__inactive, __a, __b, __imm, __p)
#define viwdupq_m_wb_u16(__inactive, __a, __b, __imm, __p) __arm_viwdupq_m_wb_u16(__inactive, __a, __b, __imm, __p)
#define viwdupq_n_u8(__a, __b, __imm) __arm_viwdupq_n_u8(__a, __b, __imm)
#define viwdupq_n_u32(__a, __b, __imm) __arm_viwdupq_n_u32(__a, __b, __imm)
#define viwdupq_n_u16(__a, __b, __imm) __arm_viwdupq_n_u16(__a, __b, __imm)
#define viwdupq_wb_u8( __a, __b, __imm) __arm_viwdupq_wb_u8( __a, __b, __imm)
#define viwdupq_wb_u32( __a, __b, __imm) __arm_viwdupq_wb_u32( __a, __b, __imm)
#define viwdupq_wb_u16( __a, __b, __imm) __arm_viwdupq_wb_u16( __a, __b, __imm)
/* Gather-load / scatter-store "base with write-back" aliases.  All take
   __addr (the base-vector operand) and an __offset immediate; store forms
   additionally take the __value to store.  _z_* and _p_* forms take a
   predicate __p (presumably zeroing / predicated respectively — confirm
   against the __arm_ definitions / ACLE).  */
#define vldrdq_gather_base_wb_s64(__addr, __offset) __arm_vldrdq_gather_base_wb_s64(__addr, __offset)
#define vldrdq_gather_base_wb_u64(__addr, __offset) __arm_vldrdq_gather_base_wb_u64(__addr, __offset)
#define vldrdq_gather_base_wb_z_s64(__addr, __offset, __p) __arm_vldrdq_gather_base_wb_z_s64(__addr, __offset, __p)
#define vldrdq_gather_base_wb_z_u64(__addr, __offset, __p) __arm_vldrdq_gather_base_wb_z_u64(__addr, __offset, __p)
#define vldrwq_gather_base_wb_f32(__addr, __offset) __arm_vldrwq_gather_base_wb_f32(__addr, __offset)
#define vldrwq_gather_base_wb_s32(__addr, __offset) __arm_vldrwq_gather_base_wb_s32(__addr, __offset)
#define vldrwq_gather_base_wb_u32(__addr, __offset) __arm_vldrwq_gather_base_wb_u32(__addr, __offset)
#define vldrwq_gather_base_wb_z_f32(__addr, __offset, __p) __arm_vldrwq_gather_base_wb_z_f32(__addr, __offset, __p)
#define vldrwq_gather_base_wb_z_s32(__addr, __offset, __p) __arm_vldrwq_gather_base_wb_z_s32(__addr, __offset, __p)
#define vldrwq_gather_base_wb_z_u32(__addr, __offset, __p) __arm_vldrwq_gather_base_wb_z_u32(__addr, __offset, __p)
#define vstrdq_scatter_base_wb_p_s64(__addr, __offset, __value, __p) __arm_vstrdq_scatter_base_wb_p_s64(__addr, __offset, __value, __p)
#define vstrdq_scatter_base_wb_p_u64(__addr, __offset, __value, __p) __arm_vstrdq_scatter_base_wb_p_u64(__addr, __offset, __value, __p)
#define vstrdq_scatter_base_wb_s64(__addr, __offset, __value) __arm_vstrdq_scatter_base_wb_s64(__addr, __offset, __value)
#define vstrdq_scatter_base_wb_u64(__addr, __offset, __value) __arm_vstrdq_scatter_base_wb_u64(__addr, __offset, __value)
#define vstrwq_scatter_base_wb_p_s32(__addr, __offset, __value, __p) __arm_vstrwq_scatter_base_wb_p_s32(__addr, __offset, __value, __p)
#define vstrwq_scatter_base_wb_p_f32(__addr, __offset, __value, __p) __arm_vstrwq_scatter_base_wb_p_f32(__addr, __offset, __value, __p)
#define vstrwq_scatter_base_wb_p_u32(__addr, __offset, __value, __p) __arm_vstrwq_scatter_base_wb_p_u32(__addr, __offset, __value, __p)
#define vstrwq_scatter_base_wb_s32(__addr, __offset, __value) __arm_vstrwq_scatter_base_wb_s32(__addr, __offset, __value)
#define vstrwq_scatter_base_wb_u32(__addr, __offset, __value) __arm_vstrwq_scatter_base_wb_u32(__addr, __offset, __value)
#define vstrwq_scatter_base_wb_f32(__addr, __offset, __value) __arm_vstrwq_scatter_base_wb_f32(__addr, __offset, __value)
/* Predicated _x variants of the incrementing/decrementing duplicate
   intrinsics (every form takes a trailing predicate __p; _x presumably
   means "don't care" for inactive lanes — confirm against ACLE).  */
#define vddupq_x_n_u8(__a, __imm, __p) __arm_vddupq_x_n_u8(__a, __imm, __p)
#define vddupq_x_n_u16(__a, __imm, __p) __arm_vddupq_x_n_u16(__a, __imm, __p)
#define vddupq_x_n_u32(__a, __imm, __p) __arm_vddupq_x_n_u32(__a, __imm, __p)
#define vddupq_x_wb_u8(__a, __imm, __p) __arm_vddupq_x_wb_u8(__a, __imm, __p)
#define vddupq_x_wb_u16(__a, __imm, __p) __arm_vddupq_x_wb_u16(__a, __imm, __p)
#define vddupq_x_wb_u32(__a, __imm, __p) __arm_vddupq_x_wb_u32(__a, __imm, __p)
#define vdwdupq_x_n_u8(__a, __b, __imm, __p) __arm_vdwdupq_x_n_u8(__a, __b, __imm, __p)
#define vdwdupq_x_n_u16(__a, __b, __imm, __p) __arm_vdwdupq_x_n_u16(__a, __b, __imm, __p)
#define vdwdupq_x_n_u32(__a, __b, __imm, __p) __arm_vdwdupq_x_n_u32(__a, __b, __imm, __p)
#define vdwdupq_x_wb_u8(__a, __b, __imm, __p) __arm_vdwdupq_x_wb_u8(__a, __b, __imm, __p)
#define vdwdupq_x_wb_u16(__a, __b, __imm, __p) __arm_vdwdupq_x_wb_u16(__a, __b, __imm, __p)
#define vdwdupq_x_wb_u32(__a, __b, __imm, __p) __arm_vdwdupq_x_wb_u32(__a, __b, __imm, __p)
#define vidupq_x_n_u8(__a, __imm, __p) __arm_vidupq_x_n_u8(__a, __imm, __p)
#define vidupq_x_n_u16(__a, __imm, __p) __arm_vidupq_x_n_u16(__a, __imm, __p)
#define vidupq_x_n_u32(__a, __imm, __p) __arm_vidupq_x_n_u32(__a, __imm, __p)
#define vidupq_x_wb_u8(__a, __imm, __p) __arm_vidupq_x_wb_u8(__a, __imm, __p)
#define vidupq_x_wb_u16(__a, __imm, __p) __arm_vidupq_x_wb_u16(__a, __imm, __p)
#define vidupq_x_wb_u32(__a, __imm, __p) __arm_vidupq_x_wb_u32(__a, __imm, __p)
#define viwdupq_x_n_u8(__a, __b, __imm, __p) __arm_viwdupq_x_n_u8(__a, __b, __imm, __p)
#define viwdupq_x_n_u16(__a, __b, __imm, __p) __arm_viwdupq_x_n_u16(__a, __b, __imm, __p)
#define viwdupq_x_n_u32(__a, __b, __imm, __p) __arm_viwdupq_x_n_u32(__a, __b, __imm, __p)
#define viwdupq_x_wb_u8(__a, __b, __imm, __p) __arm_viwdupq_x_wb_u8(__a, __b, __imm, __p)
#define viwdupq_x_wb_u16(__a, __b, __imm, __p) __arm_viwdupq_x_wb_u16(__a, __b, __imm, __p)
#define viwdupq_x_wb_u32(__a, __b, __imm, __p) __arm_viwdupq_x_wb_u32(__a, __b, __imm, __p)
/* Predicated _x variants of the integer (and polynomial) vector
   operations.  Every alias forwards operands (__a, and __b or __imm
   where present) plus a trailing predicate __p to the matching
   __arm_-prefixed implementation.  The _n_ infix marks forms whose
   second operand is a scalar/immediate rather than a vector.  */
#define vdupq_x_n_s8(__a, __p) __arm_vdupq_x_n_s8(__a, __p)
#define vdupq_x_n_s16(__a, __p) __arm_vdupq_x_n_s16(__a, __p)
#define vdupq_x_n_s32(__a, __p) __arm_vdupq_x_n_s32(__a, __p)
#define vdupq_x_n_u8(__a, __p) __arm_vdupq_x_n_u8(__a, __p)
#define vdupq_x_n_u16(__a, __p) __arm_vdupq_x_n_u16(__a, __p)
#define vdupq_x_n_u32(__a, __p) __arm_vdupq_x_n_u32(__a, __p)
#define vminq_x_s8(__a, __b, __p) __arm_vminq_x_s8(__a, __b, __p)
#define vminq_x_s16(__a, __b, __p) __arm_vminq_x_s16(__a, __b, __p)
#define vminq_x_s32(__a, __b, __p) __arm_vminq_x_s32(__a, __b, __p)
#define vminq_x_u8(__a, __b, __p) __arm_vminq_x_u8(__a, __b, __p)
#define vminq_x_u16(__a, __b, __p) __arm_vminq_x_u16(__a, __b, __p)
#define vminq_x_u32(__a, __b, __p) __arm_vminq_x_u32(__a, __b, __p)
#define vmaxq_x_s8(__a, __b, __p) __arm_vmaxq_x_s8(__a, __b, __p)
#define vmaxq_x_s16(__a, __b, __p) __arm_vmaxq_x_s16(__a, __b, __p)
#define vmaxq_x_s32(__a, __b, __p) __arm_vmaxq_x_s32(__a, __b, __p)
#define vmaxq_x_u8(__a, __b, __p) __arm_vmaxq_x_u8(__a, __b, __p)
#define vmaxq_x_u16(__a, __b, __p) __arm_vmaxq_x_u16(__a, __b, __p)
#define vmaxq_x_u32(__a, __b, __p) __arm_vmaxq_x_u32(__a, __b, __p)
#define vabdq_x_s8(__a, __b, __p) __arm_vabdq_x_s8(__a, __b, __p)
#define vabdq_x_s16(__a, __b, __p) __arm_vabdq_x_s16(__a, __b, __p)
#define vabdq_x_s32(__a, __b, __p) __arm_vabdq_x_s32(__a, __b, __p)
#define vabdq_x_u8(__a, __b, __p) __arm_vabdq_x_u8(__a, __b, __p)
#define vabdq_x_u16(__a, __b, __p) __arm_vabdq_x_u16(__a, __b, __p)
#define vabdq_x_u32(__a, __b, __p) __arm_vabdq_x_u32(__a, __b, __p)
#define vabsq_x_s8(__a, __p) __arm_vabsq_x_s8(__a, __p)
#define vabsq_x_s16(__a, __p) __arm_vabsq_x_s16(__a, __p)
#define vabsq_x_s32(__a, __p) __arm_vabsq_x_s32(__a, __p)
#define vaddq_x_s8(__a, __b, __p) __arm_vaddq_x_s8(__a, __b, __p)
#define vaddq_x_s16(__a, __b, __p) __arm_vaddq_x_s16(__a, __b, __p)
#define vaddq_x_s32(__a, __b, __p) __arm_vaddq_x_s32(__a, __b, __p)
#define vaddq_x_n_s8(__a, __b, __p) __arm_vaddq_x_n_s8(__a, __b, __p)
#define vaddq_x_n_s16(__a, __b, __p) __arm_vaddq_x_n_s16(__a, __b, __p)
#define vaddq_x_n_s32(__a, __b, __p) __arm_vaddq_x_n_s32(__a, __b, __p)
#define vaddq_x_u8(__a, __b, __p) __arm_vaddq_x_u8(__a, __b, __p)
#define vaddq_x_u16(__a, __b, __p) __arm_vaddq_x_u16(__a, __b, __p)
#define vaddq_x_u32(__a, __b, __p) __arm_vaddq_x_u32(__a, __b, __p)
#define vaddq_x_n_u8(__a, __b, __p) __arm_vaddq_x_n_u8(__a, __b, __p)
#define vaddq_x_n_u16(__a, __b, __p) __arm_vaddq_x_n_u16(__a, __b, __p)
#define vaddq_x_n_u32(__a, __b, __p) __arm_vaddq_x_n_u32(__a, __b, __p)
#define vclsq_x_s8(__a, __p) __arm_vclsq_x_s8(__a, __p)
#define vclsq_x_s16(__a, __p) __arm_vclsq_x_s16(__a, __p)
#define vclsq_x_s32(__a, __p) __arm_vclsq_x_s32(__a, __p)
#define vclzq_x_s8(__a, __p) __arm_vclzq_x_s8(__a, __p)
#define vclzq_x_s16(__a, __p) __arm_vclzq_x_s16(__a, __p)
#define vclzq_x_s32(__a, __p) __arm_vclzq_x_s32(__a, __p)
#define vclzq_x_u8(__a, __p) __arm_vclzq_x_u8(__a, __p)
#define vclzq_x_u16(__a, __p) __arm_vclzq_x_u16(__a, __p)
#define vclzq_x_u32(__a, __p) __arm_vclzq_x_u32(__a, __p)
#define vnegq_x_s8(__a, __p) __arm_vnegq_x_s8(__a, __p)
#define vnegq_x_s16(__a, __p) __arm_vnegq_x_s16(__a, __p)
#define vnegq_x_s32(__a, __p) __arm_vnegq_x_s32(__a, __p)
#define vmulhq_x_s8(__a, __b, __p) __arm_vmulhq_x_s8(__a, __b, __p)
#define vmulhq_x_s16(__a, __b, __p) __arm_vmulhq_x_s16(__a, __b, __p)
#define vmulhq_x_s32(__a, __b, __p) __arm_vmulhq_x_s32(__a, __b, __p)
#define vmulhq_x_u8(__a, __b, __p) __arm_vmulhq_x_u8(__a, __b, __p)
#define vmulhq_x_u16(__a, __b, __p) __arm_vmulhq_x_u16(__a, __b, __p)
#define vmulhq_x_u32(__a, __b, __p) __arm_vmulhq_x_u32(__a, __b, __p)
/* Widening multiplies: _poly_ forms operate on polynomial types (p8/p16),
   _int_ forms on integer types.  */
#define vmullbq_poly_x_p8(__a, __b, __p) __arm_vmullbq_poly_x_p8(__a, __b, __p)
#define vmullbq_poly_x_p16(__a, __b, __p) __arm_vmullbq_poly_x_p16(__a, __b, __p)
#define vmullbq_int_x_s8(__a, __b, __p) __arm_vmullbq_int_x_s8(__a, __b, __p)
#define vmullbq_int_x_s16(__a, __b, __p) __arm_vmullbq_int_x_s16(__a, __b, __p)
#define vmullbq_int_x_s32(__a, __b, __p) __arm_vmullbq_int_x_s32(__a, __b, __p)
#define vmullbq_int_x_u8(__a, __b, __p) __arm_vmullbq_int_x_u8(__a, __b, __p)
#define vmullbq_int_x_u16(__a, __b, __p) __arm_vmullbq_int_x_u16(__a, __b, __p)
#define vmullbq_int_x_u32(__a, __b, __p) __arm_vmullbq_int_x_u32(__a, __b, __p)
#define vmulltq_poly_x_p8(__a, __b, __p) __arm_vmulltq_poly_x_p8(__a, __b, __p)
#define vmulltq_poly_x_p16(__a, __b, __p) __arm_vmulltq_poly_x_p16(__a, __b, __p)
#define vmulltq_int_x_s8(__a, __b, __p) __arm_vmulltq_int_x_s8(__a, __b, __p)
#define vmulltq_int_x_s16(__a, __b, __p) __arm_vmulltq_int_x_s16(__a, __b, __p)
#define vmulltq_int_x_s32(__a, __b, __p) __arm_vmulltq_int_x_s32(__a, __b, __p)
#define vmulltq_int_x_u8(__a, __b, __p) __arm_vmulltq_int_x_u8(__a, __b, __p)
#define vmulltq_int_x_u16(__a, __b, __p) __arm_vmulltq_int_x_u16(__a, __b, __p)
#define vmulltq_int_x_u32(__a, __b, __p) __arm_vmulltq_int_x_u32(__a, __b, __p)
#define vmulq_x_s8(__a, __b, __p) __arm_vmulq_x_s8(__a, __b, __p)
#define vmulq_x_s16(__a, __b, __p) __arm_vmulq_x_s16(__a, __b, __p)
#define vmulq_x_s32(__a, __b, __p) __arm_vmulq_x_s32(__a, __b, __p)
#define vmulq_x_n_s8(__a, __b, __p) __arm_vmulq_x_n_s8(__a, __b, __p)
#define vmulq_x_n_s16(__a, __b, __p) __arm_vmulq_x_n_s16(__a, __b, __p)
#define vmulq_x_n_s32(__a, __b, __p) __arm_vmulq_x_n_s32(__a, __b, __p)
#define vmulq_x_u8(__a, __b, __p) __arm_vmulq_x_u8(__a, __b, __p)
#define vmulq_x_u16(__a, __b, __p) __arm_vmulq_x_u16(__a, __b, __p)
#define vmulq_x_u32(__a, __b, __p) __arm_vmulq_x_u32(__a, __b, __p)
#define vmulq_x_n_u8(__a, __b, __p) __arm_vmulq_x_n_u8(__a, __b, __p)
#define vmulq_x_n_u16(__a, __b, __p) __arm_vmulq_x_n_u16(__a, __b, __p)
#define vmulq_x_n_u32(__a, __b, __p) __arm_vmulq_x_n_u32(__a, __b, __p)
#define vsubq_x_s8(__a, __b, __p) __arm_vsubq_x_s8(__a, __b, __p)
#define vsubq_x_s16(__a, __b, __p) __arm_vsubq_x_s16(__a, __b, __p)
#define vsubq_x_s32(__a, __b, __p) __arm_vsubq_x_s32(__a, __b, __p)
#define vsubq_x_n_s8(__a, __b, __p) __arm_vsubq_x_n_s8(__a, __b, __p)
#define vsubq_x_n_s16(__a, __b, __p) __arm_vsubq_x_n_s16(__a, __b, __p)
#define vsubq_x_n_s32(__a, __b, __p) __arm_vsubq_x_n_s32(__a, __b, __p)
#define vsubq_x_u8(__a, __b, __p) __arm_vsubq_x_u8(__a, __b, __p)
#define vsubq_x_u16(__a, __b, __p) __arm_vsubq_x_u16(__a, __b, __p)
#define vsubq_x_u32(__a, __b, __p) __arm_vsubq_x_u32(__a, __b, __p)
#define vsubq_x_n_u8(__a, __b, __p) __arm_vsubq_x_n_u8(__a, __b, __p)
#define vsubq_x_n_u16(__a, __b, __p) __arm_vsubq_x_n_u16(__a, __b, __p)
#define vsubq_x_n_u32(__a, __b, __p) __arm_vsubq_x_n_u32(__a, __b, __p)
#define vcaddq_rot90_x_s8(__a, __b, __p) __arm_vcaddq_rot90_x_s8(__a, __b, __p)
#define vcaddq_rot90_x_s16(__a, __b, __p) __arm_vcaddq_rot90_x_s16(__a, __b, __p)
#define vcaddq_rot90_x_s32(__a, __b, __p) __arm_vcaddq_rot90_x_s32(__a, __b, __p)
#define vcaddq_rot90_x_u8(__a, __b, __p) __arm_vcaddq_rot90_x_u8(__a, __b, __p)
#define vcaddq_rot90_x_u16(__a, __b, __p) __arm_vcaddq_rot90_x_u16(__a, __b, __p)
#define vcaddq_rot90_x_u32(__a, __b, __p) __arm_vcaddq_rot90_x_u32(__a, __b, __p)
#define vcaddq_rot270_x_s8(__a, __b, __p) __arm_vcaddq_rot270_x_s8(__a, __b, __p)
#define vcaddq_rot270_x_s16(__a, __b, __p) __arm_vcaddq_rot270_x_s16(__a, __b, __p)
#define vcaddq_rot270_x_s32(__a, __b, __p) __arm_vcaddq_rot270_x_s32(__a, __b, __p)
#define vcaddq_rot270_x_u8(__a, __b, __p) __arm_vcaddq_rot270_x_u8(__a, __b, __p)
#define vcaddq_rot270_x_u16(__a, __b, __p) __arm_vcaddq_rot270_x_u16(__a, __b, __p)
#define vcaddq_rot270_x_u32(__a, __b, __p) __arm_vcaddq_rot270_x_u32(__a, __b, __p)
#define vhaddq_x_n_s8(__a, __b, __p) __arm_vhaddq_x_n_s8(__a, __b, __p)
#define vhaddq_x_n_s16(__a, __b, __p) __arm_vhaddq_x_n_s16(__a, __b, __p)
#define vhaddq_x_n_s32(__a, __b, __p) __arm_vhaddq_x_n_s32(__a, __b, __p)
#define vhaddq_x_n_u8(__a, __b, __p) __arm_vhaddq_x_n_u8(__a, __b, __p)
#define vhaddq_x_n_u16(__a, __b, __p) __arm_vhaddq_x_n_u16(__a, __b, __p)
#define vhaddq_x_n_u32(__a, __b, __p) __arm_vhaddq_x_n_u32(__a, __b, __p)
#define vhaddq_x_s8(__a, __b, __p) __arm_vhaddq_x_s8(__a, __b, __p)
#define vhaddq_x_s16(__a, __b, __p) __arm_vhaddq_x_s16(__a, __b, __p)
#define vhaddq_x_s32(__a, __b, __p) __arm_vhaddq_x_s32(__a, __b, __p)
#define vhaddq_x_u8(__a, __b, __p) __arm_vhaddq_x_u8(__a, __b, __p)
#define vhaddq_x_u16(__a, __b, __p) __arm_vhaddq_x_u16(__a, __b, __p)
#define vhaddq_x_u32(__a, __b, __p) __arm_vhaddq_x_u32(__a, __b, __p)
#define vhcaddq_rot90_x_s8(__a, __b, __p) __arm_vhcaddq_rot90_x_s8(__a, __b, __p)
#define vhcaddq_rot90_x_s16(__a, __b, __p) __arm_vhcaddq_rot90_x_s16(__a, __b, __p)
#define vhcaddq_rot90_x_s32(__a, __b, __p) __arm_vhcaddq_rot90_x_s32(__a, __b, __p)
#define vhcaddq_rot270_x_s8(__a, __b, __p) __arm_vhcaddq_rot270_x_s8(__a, __b, __p)
#define vhcaddq_rot270_x_s16(__a, __b, __p) __arm_vhcaddq_rot270_x_s16(__a, __b, __p)
#define vhcaddq_rot270_x_s32(__a, __b, __p) __arm_vhcaddq_rot270_x_s32(__a, __b, __p)
#define vhsubq_x_n_s8(__a, __b, __p) __arm_vhsubq_x_n_s8(__a, __b, __p)
#define vhsubq_x_n_s16(__a, __b, __p) __arm_vhsubq_x_n_s16(__a, __b, __p)
#define vhsubq_x_n_s32(__a, __b, __p) __arm_vhsubq_x_n_s32(__a, __b, __p)
#define vhsubq_x_n_u8(__a, __b, __p) __arm_vhsubq_x_n_u8(__a, __b, __p)
#define vhsubq_x_n_u16(__a, __b, __p) __arm_vhsubq_x_n_u16(__a, __b, __p)
#define vhsubq_x_n_u32(__a, __b, __p) __arm_vhsubq_x_n_u32(__a, __b, __p)
#define vhsubq_x_s8(__a, __b, __p) __arm_vhsubq_x_s8(__a, __b, __p)
#define vhsubq_x_s16(__a, __b, __p) __arm_vhsubq_x_s16(__a, __b, __p)
#define vhsubq_x_s32(__a, __b, __p) __arm_vhsubq_x_s32(__a, __b, __p)
#define vhsubq_x_u8(__a, __b, __p) __arm_vhsubq_x_u8(__a, __b, __p)
#define vhsubq_x_u16(__a, __b, __p) __arm_vhsubq_x_u16(__a, __b, __p)
#define vhsubq_x_u32(__a, __b, __p) __arm_vhsubq_x_u32(__a, __b, __p)
#define vrhaddq_x_s8(__a, __b, __p) __arm_vrhaddq_x_s8(__a, __b, __p)
#define vrhaddq_x_s16(__a, __b, __p) __arm_vrhaddq_x_s16(__a, __b, __p)
#define vrhaddq_x_s32(__a, __b, __p) __arm_vrhaddq_x_s32(__a, __b, __p)
#define vrhaddq_x_u8(__a, __b, __p) __arm_vrhaddq_x_u8(__a, __b, __p)
#define vrhaddq_x_u16(__a, __b, __p) __arm_vrhaddq_x_u16(__a, __b, __p)
#define vrhaddq_x_u32(__a, __b, __p) __arm_vrhaddq_x_u32(__a, __b, __p)
#define vrmulhq_x_s8(__a, __b, __p) __arm_vrmulhq_x_s8(__a, __b, __p)
#define vrmulhq_x_s16(__a, __b, __p) __arm_vrmulhq_x_s16(__a, __b, __p)
#define vrmulhq_x_s32(__a, __b, __p) __arm_vrmulhq_x_s32(__a, __b, __p)
#define vrmulhq_x_u8(__a, __b, __p) __arm_vrmulhq_x_u8(__a, __b, __p)
#define vrmulhq_x_u16(__a, __b, __p) __arm_vrmulhq_x_u16(__a, __b, __p)
#define vrmulhq_x_u32(__a, __b, __p) __arm_vrmulhq_x_u32(__a, __b, __p)
#define vandq_x_s8(__a, __b, __p) __arm_vandq_x_s8(__a, __b, __p)
#define vandq_x_s16(__a, __b, __p) __arm_vandq_x_s16(__a, __b, __p)
#define vandq_x_s32(__a, __b, __p) __arm_vandq_x_s32(__a, __b, __p)
#define vandq_x_u8(__a, __b, __p) __arm_vandq_x_u8(__a, __b, __p)
#define vandq_x_u16(__a, __b, __p) __arm_vandq_x_u16(__a, __b, __p)
#define vandq_x_u32(__a, __b, __p) __arm_vandq_x_u32(__a, __b, __p)
#define vbicq_x_s8(__a, __b, __p) __arm_vbicq_x_s8(__a, __b, __p)
#define vbicq_x_s16(__a, __b, __p) __arm_vbicq_x_s16(__a, __b, __p)
#define vbicq_x_s32(__a, __b, __p) __arm_vbicq_x_s32(__a, __b, __p)
#define vbicq_x_u8(__a, __b, __p) __arm_vbicq_x_u8(__a, __b, __p)
#define vbicq_x_u16(__a, __b, __p) __arm_vbicq_x_u16(__a, __b, __p)
#define vbicq_x_u32(__a, __b, __p) __arm_vbicq_x_u32(__a, __b, __p)
#define vbrsrq_x_n_s8(__a, __b, __p) __arm_vbrsrq_x_n_s8(__a, __b, __p)
#define vbrsrq_x_n_s16(__a, __b, __p) __arm_vbrsrq_x_n_s16(__a, __b, __p)
#define vbrsrq_x_n_s32(__a, __b, __p) __arm_vbrsrq_x_n_s32(__a, __b, __p)
#define vbrsrq_x_n_u8(__a, __b, __p) __arm_vbrsrq_x_n_u8(__a, __b, __p)
#define vbrsrq_x_n_u16(__a, __b, __p) __arm_vbrsrq_x_n_u16(__a, __b, __p)
#define vbrsrq_x_n_u32(__a, __b, __p) __arm_vbrsrq_x_n_u32(__a, __b, __p)
#define veorq_x_s8(__a, __b, __p) __arm_veorq_x_s8(__a, __b, __p)
#define veorq_x_s16(__a, __b, __p) __arm_veorq_x_s16(__a, __b, __p)
#define veorq_x_s32(__a, __b, __p) __arm_veorq_x_s32(__a, __b, __p)
#define veorq_x_u8(__a, __b, __p) __arm_veorq_x_u8(__a, __b, __p)
#define veorq_x_u16(__a, __b, __p) __arm_veorq_x_u16(__a, __b, __p)
#define veorq_x_u32(__a, __b, __p) __arm_veorq_x_u32(__a, __b, __p)
#define vmovlbq_x_s8(__a, __p) __arm_vmovlbq_x_s8(__a, __p)
#define vmovlbq_x_s16(__a, __p) __arm_vmovlbq_x_s16(__a, __p)
#define vmovlbq_x_u8(__a, __p) __arm_vmovlbq_x_u8(__a, __p)
#define vmovlbq_x_u16(__a, __p) __arm_vmovlbq_x_u16(__a, __p)
#define vmovltq_x_s8(__a, __p) __arm_vmovltq_x_s8(__a, __p)
#define vmovltq_x_s16(__a, __p) __arm_vmovltq_x_s16(__a, __p)
#define vmovltq_x_u8(__a, __p) __arm_vmovltq_x_u8(__a, __p)
#define vmovltq_x_u16(__a, __p) __arm_vmovltq_x_u16(__a, __p)
#define vmvnq_x_s8(__a, __p) __arm_vmvnq_x_s8(__a, __p)
#define vmvnq_x_s16(__a, __p) __arm_vmvnq_x_s16(__a, __p)
#define vmvnq_x_s32(__a, __p) __arm_vmvnq_x_s32(__a, __p)
#define vmvnq_x_u8(__a, __p) __arm_vmvnq_x_u8(__a, __p)
#define vmvnq_x_u16(__a, __p) __arm_vmvnq_x_u16(__a, __p)
#define vmvnq_x_u32(__a, __p) __arm_vmvnq_x_u32(__a, __p)
/* vmvnq_x_n_* take only an immediate and a predicate (no vector input).  */
#define vmvnq_x_n_s16( __imm, __p) __arm_vmvnq_x_n_s16( __imm, __p)
#define vmvnq_x_n_s32( __imm, __p) __arm_vmvnq_x_n_s32( __imm, __p)
#define vmvnq_x_n_u16( __imm, __p) __arm_vmvnq_x_n_u16( __imm, __p)
#define vmvnq_x_n_u32( __imm, __p) __arm_vmvnq_x_n_u32( __imm, __p)
#define vornq_x_s8(__a, __b, __p) __arm_vornq_x_s8(__a, __b, __p)
#define vornq_x_s16(__a, __b, __p) __arm_vornq_x_s16(__a, __b, __p)
#define vornq_x_s32(__a, __b, __p) __arm_vornq_x_s32(__a, __b, __p)
#define vornq_x_u8(__a, __b, __p) __arm_vornq_x_u8(__a, __b, __p)
#define vornq_x_u16(__a, __b, __p) __arm_vornq_x_u16(__a, __b, __p)
#define vornq_x_u32(__a, __b, __p) __arm_vornq_x_u32(__a, __b, __p)
#define vorrq_x_s8(__a, __b, __p) __arm_vorrq_x_s8(__a, __b, __p)
#define vorrq_x_s16(__a, __b, __p) __arm_vorrq_x_s16(__a, __b, __p)
#define vorrq_x_s32(__a, __b, __p) __arm_vorrq_x_s32(__a, __b, __p)
#define vorrq_x_u8(__a, __b, __p) __arm_vorrq_x_u8(__a, __b, __p)
#define vorrq_x_u16(__a, __b, __p) __arm_vorrq_x_u16(__a, __b, __p)
#define vorrq_x_u32(__a, __b, __p) __arm_vorrq_x_u32(__a, __b, __p)
#define vrev16q_x_s8(__a, __p) __arm_vrev16q_x_s8(__a, __p)
#define vrev16q_x_u8(__a, __p) __arm_vrev16q_x_u8(__a, __p)
#define vrev32q_x_s8(__a, __p) __arm_vrev32q_x_s8(__a, __p)
#define vrev32q_x_s16(__a, __p) __arm_vrev32q_x_s16(__a, __p)
#define vrev32q_x_u8(__a, __p) __arm_vrev32q_x_u8(__a, __p)
#define vrev32q_x_u16(__a, __p) __arm_vrev32q_x_u16(__a, __p)
#define vrev64q_x_s8(__a, __p) __arm_vrev64q_x_s8(__a, __p)
#define vrev64q_x_s16(__a, __p) __arm_vrev64q_x_s16(__a, __p)
#define vrev64q_x_s32(__a, __p) __arm_vrev64q_x_s32(__a, __p)
#define vrev64q_x_u8(__a, __p) __arm_vrev64q_x_u8(__a, __p)
#define vrev64q_x_u16(__a, __p) __arm_vrev64q_x_u16(__a, __p)
#define vrev64q_x_u32(__a, __p) __arm_vrev64q_x_u32(__a, __p)
#define vrshlq_x_s8(__a, __b, __p) __arm_vrshlq_x_s8(__a, __b, __p)
#define vrshlq_x_s16(__a, __b, __p) __arm_vrshlq_x_s16(__a, __b, __p)
#define vrshlq_x_s32(__a, __b, __p) __arm_vrshlq_x_s32(__a, __b, __p)
#define vrshlq_x_u8(__a, __b, __p) __arm_vrshlq_x_u8(__a, __b, __p)
#define vrshlq_x_u16(__a, __b, __p) __arm_vrshlq_x_u16(__a, __b, __p)
#define vrshlq_x_u32(__a, __b, __p) __arm_vrshlq_x_u32(__a, __b, __p)
#define vshllbq_x_n_s8(__a, __imm, __p) __arm_vshllbq_x_n_s8(__a, __imm, __p)
#define vshllbq_x_n_s16(__a, __imm, __p) __arm_vshllbq_x_n_s16(__a, __imm, __p)
#define vshllbq_x_n_u8(__a, __imm, __p) __arm_vshllbq_x_n_u8(__a, __imm, __p)
#define vshllbq_x_n_u16(__a, __imm, __p) __arm_vshllbq_x_n_u16(__a, __imm, __p)
#define vshlltq_x_n_s8(__a, __imm, __p) __arm_vshlltq_x_n_s8(__a, __imm, __p)
#define vshlltq_x_n_s16(__a, __imm, __p) __arm_vshlltq_x_n_s16(__a, __imm, __p)
#define vshlltq_x_n_u8(__a, __imm, __p) __arm_vshlltq_x_n_u8(__a, __imm, __p)
#define vshlltq_x_n_u16(__a, __imm, __p) __arm_vshlltq_x_n_u16(__a, __imm, __p)
#define vshlq_x_s8(__a, __b, __p) __arm_vshlq_x_s8(__a, __b, __p)
#define vshlq_x_s16(__a, __b, __p) __arm_vshlq_x_s16(__a, __b, __p)
#define vshlq_x_s32(__a, __b, __p) __arm_vshlq_x_s32(__a, __b, __p)
#define vshlq_x_u8(__a, __b, __p) __arm_vshlq_x_u8(__a, __b, __p)
#define vshlq_x_u16(__a, __b, __p) __arm_vshlq_x_u16(__a, __b, __p)
#define vshlq_x_u32(__a, __b, __p) __arm_vshlq_x_u32(__a, __b, __p)
#define vshlq_x_n_s8(__a, __imm, __p) __arm_vshlq_x_n_s8(__a, __imm, __p)
#define vshlq_x_n_s16(__a, __imm, __p) __arm_vshlq_x_n_s16(__a, __imm, __p)
#define vshlq_x_n_s32(__a, __imm, __p) __arm_vshlq_x_n_s32(__a, __imm, __p)
#define vshlq_x_n_u8(__a, __imm, __p) __arm_vshlq_x_n_u8(__a, __imm, __p)
#define vshlq_x_n_u16(__a, __imm, __p) __arm_vshlq_x_n_u16(__a, __imm, __p)
#define vshlq_x_n_u32(__a, __imm, __p) __arm_vshlq_x_n_u32(__a, __imm, __p)
#define vrshrq_x_n_s8(__a, __imm, __p) __arm_vrshrq_x_n_s8(__a, __imm, __p)
#define vrshrq_x_n_s16(__a, __imm, __p) __arm_vrshrq_x_n_s16(__a, __imm, __p)
#define vrshrq_x_n_s32(__a, __imm, __p) __arm_vrshrq_x_n_s32(__a, __imm, __p)
#define vrshrq_x_n_u8(__a, __imm, __p) __arm_vrshrq_x_n_u8(__a, __imm, __p)
#define vrshrq_x_n_u16(__a, __imm, __p) __arm_vrshrq_x_n_u16(__a, __imm, __p)
#define vrshrq_x_n_u32(__a, __imm, __p) __arm_vrshrq_x_n_u32(__a, __imm, __p)
#define vshrq_x_n_s8(__a, __imm, __p) __arm_vshrq_x_n_s8(__a, __imm, __p)
#define vshrq_x_n_s16(__a, __imm, __p) __arm_vshrq_x_n_s16(__a, __imm, __p)
#define vshrq_x_n_s32(__a, __imm, __p) __arm_vshrq_x_n_s32(__a, __imm, __p)
#define vshrq_x_n_u8(__a, __imm, __p) __arm_vshrq_x_n_u8(__a, __imm, __p)
#define vshrq_x_n_u16(__a, __imm, __p) __arm_vshrq_x_n_u16(__a, __imm, __p)
#define vshrq_x_n_u32(__a, __imm, __p) __arm_vshrq_x_n_u32(__a, __imm, __p)
/* Predicated _x variants of the floating-point (f16/f32) operations,
   including complex add/multiply rotations and float<->int conversions
   (vcvt[anpm]q, vcvtbq).  Same forwarding pattern as the integer block
   above; only available when the FP extension is enabled — confirm the
   enclosing conditional in the full file.  */
#define vdupq_x_n_f16(__a, __p) __arm_vdupq_x_n_f16(__a, __p)
#define vdupq_x_n_f32(__a, __p) __arm_vdupq_x_n_f32(__a, __p)
#define vminnmq_x_f16(__a, __b, __p) __arm_vminnmq_x_f16(__a, __b, __p)
#define vminnmq_x_f32(__a, __b, __p) __arm_vminnmq_x_f32(__a, __b, __p)
#define vmaxnmq_x_f16(__a, __b, __p) __arm_vmaxnmq_x_f16(__a, __b, __p)
#define vmaxnmq_x_f32(__a, __b, __p) __arm_vmaxnmq_x_f32(__a, __b, __p)
#define vabdq_x_f16(__a, __b, __p) __arm_vabdq_x_f16(__a, __b, __p)
#define vabdq_x_f32(__a, __b, __p) __arm_vabdq_x_f32(__a, __b, __p)
#define vabsq_x_f16(__a, __p) __arm_vabsq_x_f16(__a, __p)
#define vabsq_x_f32(__a, __p) __arm_vabsq_x_f32(__a, __p)
#define vaddq_x_f16(__a, __b, __p) __arm_vaddq_x_f16(__a, __b, __p)
#define vaddq_x_f32(__a, __b, __p) __arm_vaddq_x_f32(__a, __b, __p)
#define vaddq_x_n_f16(__a, __b, __p) __arm_vaddq_x_n_f16(__a, __b, __p)
#define vaddq_x_n_f32(__a, __b, __p) __arm_vaddq_x_n_f32(__a, __b, __p)
#define vnegq_x_f16(__a, __p) __arm_vnegq_x_f16(__a, __p)
#define vnegq_x_f32(__a, __p) __arm_vnegq_x_f32(__a, __p)
#define vmulq_x_f16(__a, __b, __p) __arm_vmulq_x_f16(__a, __b, __p)
#define vmulq_x_f32(__a, __b, __p) __arm_vmulq_x_f32(__a, __b, __p)
#define vmulq_x_n_f16(__a, __b, __p) __arm_vmulq_x_n_f16(__a, __b, __p)
#define vmulq_x_n_f32(__a, __b, __p) __arm_vmulq_x_n_f32(__a, __b, __p)
#define vsubq_x_f16(__a, __b, __p) __arm_vsubq_x_f16(__a, __b, __p)
#define vsubq_x_f32(__a, __b, __p) __arm_vsubq_x_f32(__a, __b, __p)
#define vsubq_x_n_f16(__a, __b, __p) __arm_vsubq_x_n_f16(__a, __b, __p)
#define vsubq_x_n_f32(__a, __b, __p) __arm_vsubq_x_n_f32(__a, __b, __p)
#define vcaddq_rot90_x_f16(__a, __b, __p) __arm_vcaddq_rot90_x_f16(__a, __b, __p)
#define vcaddq_rot90_x_f32(__a, __b, __p) __arm_vcaddq_rot90_x_f32(__a, __b, __p)
#define vcaddq_rot270_x_f16(__a, __b, __p) __arm_vcaddq_rot270_x_f16(__a, __b, __p)
#define vcaddq_rot270_x_f32(__a, __b, __p) __arm_vcaddq_rot270_x_f32(__a, __b, __p)
#define vcmulq_x_f16(__a, __b, __p) __arm_vcmulq_x_f16(__a, __b, __p)
#define vcmulq_x_f32(__a, __b, __p) __arm_vcmulq_x_f32(__a, __b, __p)
#define vcmulq_rot90_x_f16(__a, __b, __p) __arm_vcmulq_rot90_x_f16(__a, __b, __p)
#define vcmulq_rot90_x_f32(__a, __b, __p) __arm_vcmulq_rot90_x_f32(__a, __b, __p)
#define vcmulq_rot180_x_f16(__a, __b, __p) __arm_vcmulq_rot180_x_f16(__a, __b, __p)
#define vcmulq_rot180_x_f32(__a, __b, __p) __arm_vcmulq_rot180_x_f32(__a, __b, __p)
#define vcmulq_rot270_x_f16(__a, __b, __p) __arm_vcmulq_rot270_x_f16(__a, __b, __p)
#define vcmulq_rot270_x_f32(__a, __b, __p) __arm_vcmulq_rot270_x_f32(__a, __b, __p)
#define vcvtaq_x_s16_f16(__a, __p) __arm_vcvtaq_x_s16_f16(__a, __p)
#define vcvtaq_x_s32_f32(__a, __p) __arm_vcvtaq_x_s32_f32(__a, __p)
#define vcvtaq_x_u16_f16(__a, __p) __arm_vcvtaq_x_u16_f16(__a, __p)
#define vcvtaq_x_u32_f32(__a, __p) __arm_vcvtaq_x_u32_f32(__a, __p)
#define vcvtnq_x_s16_f16(__a, __p) __arm_vcvtnq_x_s16_f16(__a, __p)
#define vcvtnq_x_s32_f32(__a, __p) __arm_vcvtnq_x_s32_f32(__a, __p)
#define vcvtnq_x_u16_f16(__a, __p) __arm_vcvtnq_x_u16_f16(__a, __p)
#define vcvtnq_x_u32_f32(__a, __p) __arm_vcvtnq_x_u32_f32(__a, __p)
#define vcvtpq_x_s16_f16(__a, __p) __arm_vcvtpq_x_s16_f16(__a, __p)
#define vcvtpq_x_s32_f32(__a, __p) __arm_vcvtpq_x_s32_f32(__a, __p)
#define vcvtpq_x_u16_f16(__a, __p) __arm_vcvtpq_x_u16_f16(__a, __p)
#define vcvtpq_x_u32_f32(__a, __p) __arm_vcvtpq_x_u32_f32(__a, __p)
#define vcvtmq_x_s16_f16(__a, __p) __arm_vcvtmq_x_s16_f16(__a, __p)
#define vcvtmq_x_s32_f32(__a, __p) __arm_vcvtmq_x_s32_f32(__a, __p)
#define vcvtmq_x_u16_f16(__a, __p) __arm_vcvtmq_x_u16_f16(__a, __p)
#define vcvtmq_x_u32_f32(__a, __p) __arm_vcvtmq_x_u32_f32(__a, __p)
#define vcvtbq_x_f32_f16(__a, __p) __arm_vcvtbq_x_f32_f16(__a, __p)
2862 #define vcvttq_x_f32_f16(__a, __p) __arm_vcvttq_x_f32_f16(__a, __p)
2863 #define vcvtq_x_f16_u16(__a, __p) __arm_vcvtq_x_f16_u16(__a, __p)
2864 #define vcvtq_x_f16_s16(__a, __p) __arm_vcvtq_x_f16_s16(__a, __p)
2865 #define vcvtq_x_f32_s32(__a, __p) __arm_vcvtq_x_f32_s32(__a, __p)
2866 #define vcvtq_x_f32_u32(__a, __p) __arm_vcvtq_x_f32_u32(__a, __p)
2867 #define vcvtq_x_n_f16_s16(__a, __imm6, __p) __arm_vcvtq_x_n_f16_s16(__a, __imm6, __p)
2868 #define vcvtq_x_n_f16_u16(__a, __imm6, __p) __arm_vcvtq_x_n_f16_u16(__a, __imm6, __p)
2869 #define vcvtq_x_n_f32_s32(__a, __imm6, __p) __arm_vcvtq_x_n_f32_s32(__a, __imm6, __p)
2870 #define vcvtq_x_n_f32_u32(__a, __imm6, __p) __arm_vcvtq_x_n_f32_u32(__a, __imm6, __p)
2871 #define vcvtq_x_s16_f16(__a, __p) __arm_vcvtq_x_s16_f16(__a, __p)
2872 #define vcvtq_x_s32_f32(__a, __p) __arm_vcvtq_x_s32_f32(__a, __p)
2873 #define vcvtq_x_u16_f16(__a, __p) __arm_vcvtq_x_u16_f16(__a, __p)
2874 #define vcvtq_x_u32_f32(__a, __p) __arm_vcvtq_x_u32_f32(__a, __p)
2875 #define vcvtq_x_n_s16_f16(__a, __imm6, __p) __arm_vcvtq_x_n_s16_f16(__a, __imm6, __p)
2876 #define vcvtq_x_n_s32_f32(__a, __imm6, __p) __arm_vcvtq_x_n_s32_f32(__a, __imm6, __p)
2877 #define vcvtq_x_n_u16_f16(__a, __imm6, __p) __arm_vcvtq_x_n_u16_f16(__a, __imm6, __p)
2878 #define vcvtq_x_n_u32_f32(__a, __imm6, __p) __arm_vcvtq_x_n_u32_f32(__a, __imm6, __p)
2879 #define vrndq_x_f16(__a, __p) __arm_vrndq_x_f16(__a, __p)
2880 #define vrndq_x_f32(__a, __p) __arm_vrndq_x_f32(__a, __p)
2881 #define vrndnq_x_f16(__a, __p) __arm_vrndnq_x_f16(__a, __p)
2882 #define vrndnq_x_f32(__a, __p) __arm_vrndnq_x_f32(__a, __p)
2883 #define vrndmq_x_f16(__a, __p) __arm_vrndmq_x_f16(__a, __p)
2884 #define vrndmq_x_f32(__a, __p) __arm_vrndmq_x_f32(__a, __p)
2885 #define vrndpq_x_f16(__a, __p) __arm_vrndpq_x_f16(__a, __p)
2886 #define vrndpq_x_f32(__a, __p) __arm_vrndpq_x_f32(__a, __p)
2887 #define vrndaq_x_f16(__a, __p) __arm_vrndaq_x_f16(__a, __p)
2888 #define vrndaq_x_f32(__a, __p) __arm_vrndaq_x_f32(__a, __p)
2889 #define vrndxq_x_f16(__a, __p) __arm_vrndxq_x_f16(__a, __p)
2890 #define vrndxq_x_f32(__a, __p) __arm_vrndxq_x_f32(__a, __p)
2891 #define vandq_x_f16(__a, __b, __p) __arm_vandq_x_f16(__a, __b, __p)
2892 #define vandq_x_f32(__a, __b, __p) __arm_vandq_x_f32(__a, __b, __p)
2893 #define vbicq_x_f16(__a, __b, __p) __arm_vbicq_x_f16(__a, __b, __p)
2894 #define vbicq_x_f32(__a, __b, __p) __arm_vbicq_x_f32(__a, __b, __p)
2895 #define vbrsrq_x_n_f16(__a, __b, __p) __arm_vbrsrq_x_n_f16(__a, __b, __p)
2896 #define vbrsrq_x_n_f32(__a, __b, __p) __arm_vbrsrq_x_n_f32(__a, __b, __p)
2897 #define veorq_x_f16(__a, __b, __p) __arm_veorq_x_f16(__a, __b, __p)
2898 #define veorq_x_f32(__a, __b, __p) __arm_veorq_x_f32(__a, __b, __p)
2899 #define vornq_x_f16(__a, __b, __p) __arm_vornq_x_f16(__a, __b, __p)
2900 #define vornq_x_f32(__a, __b, __p) __arm_vornq_x_f32(__a, __b, __p)
2901 #define vorrq_x_f16(__a, __b, __p) __arm_vorrq_x_f16(__a, __b, __p)
2902 #define vorrq_x_f32(__a, __b, __p) __arm_vorrq_x_f32(__a, __b, __p)
2903 #define vrev32q_x_f16(__a, __p) __arm_vrev32q_x_f16(__a, __p)
2904 #define vrev64q_x_f16(__a, __p) __arm_vrev64q_x_f16(__a, __p)
2905 #define vrev64q_x_f32(__a, __p) __arm_vrev64q_x_f32(__a, __p)
2906 #define vadciq_s32(__a, __b, __carry_out) __arm_vadciq_s32(__a, __b, __carry_out)
2907 #define vadciq_u32(__a, __b, __carry_out) __arm_vadciq_u32(__a, __b, __carry_out)
2908 #define vadciq_m_s32(__inactive, __a, __b, __carry_out, __p) __arm_vadciq_m_s32(__inactive, __a, __b, __carry_out, __p)
2909 #define vadciq_m_u32(__inactive, __a, __b, __carry_out, __p) __arm_vadciq_m_u32(__inactive, __a, __b, __carry_out, __p)
2910 #define vadcq_s32(__a, __b, __carry) __arm_vadcq_s32(__a, __b, __carry)
2911 #define vadcq_u32(__a, __b, __carry) __arm_vadcq_u32(__a, __b, __carry)
2912 #define vadcq_m_s32(__inactive, __a, __b, __carry, __p) __arm_vadcq_m_s32(__inactive, __a, __b, __carry, __p)
2913 #define vadcq_m_u32(__inactive, __a, __b, __carry, __p) __arm_vadcq_m_u32(__inactive, __a, __b, __carry, __p)
2914 #define vsbciq_s32(__a, __b, __carry_out) __arm_vsbciq_s32(__a, __b, __carry_out)
2915 #define vsbciq_u32(__a, __b, __carry_out) __arm_vsbciq_u32(__a, __b, __carry_out)
2916 #define vsbciq_m_s32(__inactive, __a, __b, __carry_out, __p) __arm_vsbciq_m_s32(__inactive, __a, __b, __carry_out, __p)
2917 #define vsbciq_m_u32(__inactive, __a, __b, __carry_out, __p) __arm_vsbciq_m_u32(__inactive, __a, __b, __carry_out, __p)
2918 #define vsbcq_s32(__a, __b, __carry) __arm_vsbcq_s32(__a, __b, __carry)
2919 #define vsbcq_u32(__a, __b, __carry) __arm_vsbcq_u32(__a, __b, __carry)
2920 #define vsbcq_m_s32(__inactive, __a, __b, __carry, __p) __arm_vsbcq_m_s32(__inactive, __a, __b, __carry, __p)
2921 #define vsbcq_m_u32(__inactive, __a, __b, __carry, __p) __arm_vsbcq_m_u32(__inactive, __a, __b, __carry, __p)
2922 #define vst1q_p_u8(__addr, __value, __p) __arm_vst1q_p_u8(__addr, __value, __p)
2923 #define vst1q_p_s8(__addr, __value, __p) __arm_vst1q_p_s8(__addr, __value, __p)
2924 #define vst2q_s8(__addr, __value) __arm_vst2q_s8(__addr, __value)
2925 #define vst2q_u8(__addr, __value) __arm_vst2q_u8(__addr, __value)
2926 #define vld1q_z_u8(__base, __p) __arm_vld1q_z_u8(__base, __p)
2927 #define vld1q_z_s8(__base, __p) __arm_vld1q_z_s8(__base, __p)
2928 #define vld2q_s8(__addr) __arm_vld2q_s8(__addr)
2929 #define vld2q_u8(__addr) __arm_vld2q_u8(__addr)
2930 #define vld4q_s8(__addr) __arm_vld4q_s8(__addr)
2931 #define vld4q_u8(__addr) __arm_vld4q_u8(__addr)
2932 #define vst1q_p_u16(__addr, __value, __p) __arm_vst1q_p_u16(__addr, __value, __p)
2933 #define vst1q_p_s16(__addr, __value, __p) __arm_vst1q_p_s16(__addr, __value, __p)
2934 #define vst2q_s16(__addr, __value) __arm_vst2q_s16(__addr, __value)
2935 #define vst2q_u16(__addr, __value) __arm_vst2q_u16(__addr, __value)
2936 #define vld1q_z_u16(__base, __p) __arm_vld1q_z_u16(__base, __p)
2937 #define vld1q_z_s16(__base, __p) __arm_vld1q_z_s16(__base, __p)
2938 #define vld2q_s16(__addr) __arm_vld2q_s16(__addr)
2939 #define vld2q_u16(__addr) __arm_vld2q_u16(__addr)
2940 #define vld4q_s16(__addr) __arm_vld4q_s16(__addr)
2941 #define vld4q_u16(__addr) __arm_vld4q_u16(__addr)
2942 #define vst1q_p_u32(__addr, __value, __p) __arm_vst1q_p_u32(__addr, __value, __p)
2943 #define vst1q_p_s32(__addr, __value, __p) __arm_vst1q_p_s32(__addr, __value, __p)
2944 #define vst2q_s32(__addr, __value) __arm_vst2q_s32(__addr, __value)
2945 #define vst2q_u32(__addr, __value) __arm_vst2q_u32(__addr, __value)
2946 #define vld1q_z_u32(__base, __p) __arm_vld1q_z_u32(__base, __p)
2947 #define vld1q_z_s32(__base, __p) __arm_vld1q_z_s32(__base, __p)
2948 #define vld2q_s32(__addr) __arm_vld2q_s32(__addr)
2949 #define vld2q_u32(__addr) __arm_vld2q_u32(__addr)
2950 #define vld4q_s32(__addr) __arm_vld4q_s32(__addr)
2951 #define vld4q_u32(__addr) __arm_vld4q_u32(__addr)
2952 #define vld4q_f16(__addr) __arm_vld4q_f16(__addr)
2953 #define vld2q_f16(__addr) __arm_vld2q_f16(__addr)
2954 #define vld1q_z_f16(__base, __p) __arm_vld1q_z_f16(__base, __p)
2955 #define vst2q_f16(__addr, __value) __arm_vst2q_f16(__addr, __value)
2956 #define vst1q_p_f16(__addr, __value, __p) __arm_vst1q_p_f16(__addr, __value, __p)
2957 #define vld4q_f32(__addr) __arm_vld4q_f32(__addr)
2958 #define vld2q_f32(__addr) __arm_vld2q_f32(__addr)
2959 #define vld1q_z_f32(__base, __p) __arm_vld1q_z_f32(__base, __p)
2960 #define vst2q_f32(__addr, __value) __arm_vst2q_f32(__addr, __value)
2961 #define vst1q_p_f32(__addr, __value, __p) __arm_vst1q_p_f32(__addr, __value, __p)
2962 #define vsetq_lane_f16(__a, __b, __idx) __arm_vsetq_lane_f16(__a, __b, __idx)
2963 #define vsetq_lane_f32(__a, __b, __idx) __arm_vsetq_lane_f32(__a, __b, __idx)
2964 #define vsetq_lane_s16(__a, __b, __idx) __arm_vsetq_lane_s16(__a, __b, __idx)
2965 #define vsetq_lane_s32(__a, __b, __idx) __arm_vsetq_lane_s32(__a, __b, __idx)
2966 #define vsetq_lane_s8(__a, __b, __idx) __arm_vsetq_lane_s8(__a, __b, __idx)
2967 #define vsetq_lane_s64(__a, __b, __idx) __arm_vsetq_lane_s64(__a, __b, __idx)
2968 #define vsetq_lane_u8(__a, __b, __idx) __arm_vsetq_lane_u8(__a, __b, __idx)
2969 #define vsetq_lane_u16(__a, __b, __idx) __arm_vsetq_lane_u16(__a, __b, __idx)
2970 #define vsetq_lane_u32(__a, __b, __idx) __arm_vsetq_lane_u32(__a, __b, __idx)
2971 #define vsetq_lane_u64(__a, __b, __idx) __arm_vsetq_lane_u64(__a, __b, __idx)
2972 #define vgetq_lane_f16(__a, __idx) __arm_vgetq_lane_f16(__a, __idx)
2973 #define vgetq_lane_f32(__a, __idx) __arm_vgetq_lane_f32(__a, __idx)
2974 #define vgetq_lane_s16(__a, __idx) __arm_vgetq_lane_s16(__a, __idx)
2975 #define vgetq_lane_s32(__a, __idx) __arm_vgetq_lane_s32(__a, __idx)
2976 #define vgetq_lane_s8(__a, __idx) __arm_vgetq_lane_s8(__a, __idx)
2977 #define vgetq_lane_s64(__a, __idx) __arm_vgetq_lane_s64(__a, __idx)
2978 #define vgetq_lane_u8(__a, __idx) __arm_vgetq_lane_u8(__a, __idx)
2979 #define vgetq_lane_u16(__a, __idx) __arm_vgetq_lane_u16(__a, __idx)
2980 #define vgetq_lane_u32(__a, __idx) __arm_vgetq_lane_u32(__a, __idx)
2981 #define vgetq_lane_u64(__a, __idx) __arm_vgetq_lane_u64(__a, __idx)
2982 #define sqrshr(__p0, __p1) __arm_sqrshr(__p0, __p1)
2983 #define sqrshrl(__p0, __p1) __arm_sqrshrl(__p0, __p1)
2984 #define sqrshrl_sat48(__p0, __p1) __arm_sqrshrl_sat48(__p0, __p1)
2985 #define sqshl(__p0, __p1) __arm_sqshl(__p0, __p1)
2986 #define sqshll(__p0, __p1) __arm_sqshll(__p0, __p1)
2987 #define srshr(__p0, __p1) __arm_srshr(__p0, __p1)
2988 #define srshrl(__p0, __p1) __arm_srshrl(__p0, __p1)
2989 #define uqrshl(__p0, __p1) __arm_uqrshl(__p0, __p1)
2990 #define uqrshll(__p0, __p1) __arm_uqrshll(__p0, __p1)
2991 #define uqrshll_sat48(__p0, __p1) __arm_uqrshll_sat48(__p0, __p1)
2992 #define uqshl(__p0, __p1) __arm_uqshl(__p0, __p1)
2993 #define uqshll(__p0, __p1) __arm_uqshll(__p0, __p1)
2994 #define urshr(__p0, __p1) __arm_urshr(__p0, __p1)
2995 #define urshrl(__p0, __p1) __arm_urshrl(__p0, __p1)
2996 #define lsll(__p0, __p1) __arm_lsll(__p0, __p1)
2997 #define asrl(__p0, __p1) __arm_asrl(__p0, __p1)
2998 #define vshlcq_m_s8(__a, __b, __imm, __p) __arm_vshlcq_m_s8(__a, __b, __imm, __p)
2999 #define vshlcq_m_u8(__a, __b, __imm, __p) __arm_vshlcq_m_u8(__a, __b, __imm, __p)
3000 #define vshlcq_m_s16(__a, __b, __imm, __p) __arm_vshlcq_m_s16(__a, __b, __imm, __p)
3001 #define vshlcq_m_u16(__a, __b, __imm, __p) __arm_vshlcq_m_u16(__a, __b, __imm, __p)
3002 #define vshlcq_m_s32(__a, __b, __imm, __p) __arm_vshlcq_m_s32(__a, __b, __imm, __p)
3003 #define vshlcq_m_u32(__a, __b, __imm, __p) __arm_vshlcq_m_u32(__a, __b, __imm, __p)
/* For big-endian, GCC's vector indices are reversed within each 64 bits
   compared to the architectural lane indices used by MVE intrinsics.  */
#define __ARM_NUM_LANES(__v) (sizeof (__v) / sizeof (__v[0]))
#ifdef __ARM_BIG_ENDIAN
#define __ARM_LANEQ(__vec, __idx) (__idx ^ (__ARM_NUM_LANES(__vec)/2 - 1))
#else
/* Little-endian: architectural lane index matches GCC's vector index.  */
#define __ARM_LANEQ(__vec, __idx) __idx
#endif
/* Emit a compile-time check that __idx is a valid lane for __vec.  */
#define __ARM_CHECK_LANEQ(__vec, __idx)		\
  __builtin_arm_lane_check (__ARM_NUM_LANES(__vec),     \
			    __ARM_LANEQ(__vec, __idx))
3018 __extension__
extern __inline
void
3019 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3020 __arm_vst4q_s8 (int8_t * __addr
, int8x16x4_t __value
)
3022 union { int8x16x4_t __i
; __builtin_neon_xi __o
; } __rv
;
3024 __builtin_mve_vst4qv16qi ((__builtin_neon_qi
*) __addr
, __rv
.__o
);
3027 __extension__
extern __inline
void
3028 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3029 __arm_vst4q_s16 (int16_t * __addr
, int16x8x4_t __value
)
3031 union { int16x8x4_t __i
; __builtin_neon_xi __o
; } __rv
;
3033 __builtin_mve_vst4qv8hi ((__builtin_neon_hi
*) __addr
, __rv
.__o
);
3036 __extension__
extern __inline
void
3037 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3038 __arm_vst4q_s32 (int32_t * __addr
, int32x4x4_t __value
)
3040 union { int32x4x4_t __i
; __builtin_neon_xi __o
; } __rv
;
3042 __builtin_mve_vst4qv4si ((__builtin_neon_si
*) __addr
, __rv
.__o
);
3045 __extension__
extern __inline
void
3046 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3047 __arm_vst4q_u8 (uint8_t * __addr
, uint8x16x4_t __value
)
3049 union { uint8x16x4_t __i
; __builtin_neon_xi __o
; } __rv
;
3051 __builtin_mve_vst4qv16qi ((__builtin_neon_qi
*) __addr
, __rv
.__o
);
3054 __extension__
extern __inline
void
3055 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3056 __arm_vst4q_u16 (uint16_t * __addr
, uint16x8x4_t __value
)
3058 union { uint16x8x4_t __i
; __builtin_neon_xi __o
; } __rv
;
3060 __builtin_mve_vst4qv8hi ((__builtin_neon_hi
*) __addr
, __rv
.__o
);
3063 __extension__
extern __inline
void
3064 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3065 __arm_vst4q_u32 (uint32_t * __addr
, uint32x4x4_t __value
)
3067 union { uint32x4x4_t __i
; __builtin_neon_xi __o
; } __rv
;
3069 __builtin_mve_vst4qv4si ((__builtin_neon_si
*) __addr
, __rv
.__o
);
3072 __extension__
extern __inline int8x16_t
3073 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3074 __arm_vdupq_n_s8 (int8_t __a
)
3076 return __builtin_mve_vdupq_n_sv16qi (__a
);
3079 __extension__
extern __inline int16x8_t
3080 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3081 __arm_vdupq_n_s16 (int16_t __a
)
3083 return __builtin_mve_vdupq_n_sv8hi (__a
);
3086 __extension__
extern __inline int32x4_t
3087 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3088 __arm_vdupq_n_s32 (int32_t __a
)
3090 return __builtin_mve_vdupq_n_sv4si (__a
);
3093 __extension__
extern __inline int8x16_t
3094 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3095 __arm_vabsq_s8 (int8x16_t __a
)
3097 return __builtin_mve_vabsq_sv16qi (__a
);
3100 __extension__
extern __inline int16x8_t
3101 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3102 __arm_vabsq_s16 (int16x8_t __a
)
3104 return __builtin_mve_vabsq_sv8hi (__a
);
3107 __extension__
extern __inline int32x4_t
3108 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3109 __arm_vabsq_s32 (int32x4_t __a
)
3111 return __builtin_mve_vabsq_sv4si (__a
);
3114 __extension__
extern __inline int8x16_t
3115 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3116 __arm_vclsq_s8 (int8x16_t __a
)
3118 return __builtin_mve_vclsq_sv16qi (__a
);
3121 __extension__
extern __inline int16x8_t
3122 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3123 __arm_vclsq_s16 (int16x8_t __a
)
3125 return __builtin_mve_vclsq_sv8hi (__a
);
3128 __extension__
extern __inline int32x4_t
3129 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3130 __arm_vclsq_s32 (int32x4_t __a
)
3132 return __builtin_mve_vclsq_sv4si (__a
);
3135 __extension__
extern __inline int8x16_t
3136 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3137 __arm_vclzq_s8 (int8x16_t __a
)
3139 return __builtin_mve_vclzq_sv16qi (__a
);
3142 __extension__
extern __inline int16x8_t
3143 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3144 __arm_vclzq_s16 (int16x8_t __a
)
3146 return __builtin_mve_vclzq_sv8hi (__a
);
3149 __extension__
extern __inline int32x4_t
3150 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3151 __arm_vclzq_s32 (int32x4_t __a
)
3153 return __builtin_mve_vclzq_sv4si (__a
);
3156 __extension__
extern __inline int8x16_t
3157 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3158 __arm_vnegq_s8 (int8x16_t __a
)
3160 return __builtin_mve_vnegq_sv16qi (__a
);
3163 __extension__
extern __inline int16x8_t
3164 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3165 __arm_vnegq_s16 (int16x8_t __a
)
3167 return __builtin_mve_vnegq_sv8hi (__a
);
3170 __extension__
extern __inline int32x4_t
3171 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3172 __arm_vnegq_s32 (int32x4_t __a
)
3174 return __builtin_mve_vnegq_sv4si (__a
);
3177 __extension__
extern __inline
int64_t
3178 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3179 __arm_vaddlvq_s32 (int32x4_t __a
)
3181 return __builtin_mve_vaddlvq_sv4si (__a
);
3184 __extension__
extern __inline
int32_t
3185 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3186 __arm_vaddvq_s8 (int8x16_t __a
)
3188 return __builtin_mve_vaddvq_sv16qi (__a
);
3191 __extension__
extern __inline
int32_t
3192 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3193 __arm_vaddvq_s16 (int16x8_t __a
)
3195 return __builtin_mve_vaddvq_sv8hi (__a
);
3198 __extension__
extern __inline
int32_t
3199 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3200 __arm_vaddvq_s32 (int32x4_t __a
)
3202 return __builtin_mve_vaddvq_sv4si (__a
);
3205 __extension__
extern __inline int16x8_t
3206 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3207 __arm_vmovlbq_s8 (int8x16_t __a
)
3209 return __builtin_mve_vmovlbq_sv16qi (__a
);
3212 __extension__
extern __inline int32x4_t
3213 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3214 __arm_vmovlbq_s16 (int16x8_t __a
)
3216 return __builtin_mve_vmovlbq_sv8hi (__a
);
3219 __extension__
extern __inline int16x8_t
3220 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3221 __arm_vmovltq_s8 (int8x16_t __a
)
3223 return __builtin_mve_vmovltq_sv16qi (__a
);
3226 __extension__
extern __inline int32x4_t
3227 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3228 __arm_vmovltq_s16 (int16x8_t __a
)
3230 return __builtin_mve_vmovltq_sv8hi (__a
);
3233 __extension__
extern __inline int8x16_t
3234 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3235 __arm_vmvnq_s8 (int8x16_t __a
)
3237 return __builtin_mve_vmvnq_sv16qi (__a
);
3240 __extension__
extern __inline int16x8_t
3241 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3242 __arm_vmvnq_s16 (int16x8_t __a
)
3244 return __builtin_mve_vmvnq_sv8hi (__a
);
3247 __extension__
extern __inline int32x4_t
3248 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3249 __arm_vmvnq_s32 (int32x4_t __a
)
3251 return __builtin_mve_vmvnq_sv4si (__a
);
3254 __extension__
extern __inline int16x8_t
3255 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3256 __arm_vmvnq_n_s16 (const int16_t __imm
)
3258 return __builtin_mve_vmvnq_n_sv8hi (__imm
);
3261 __extension__
extern __inline int32x4_t
3262 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3263 __arm_vmvnq_n_s32 (const int32_t __imm
)
3265 return __builtin_mve_vmvnq_n_sv4si (__imm
);
3268 __extension__
extern __inline int8x16_t
3269 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3270 __arm_vrev16q_s8 (int8x16_t __a
)
3272 return __builtin_mve_vrev16q_sv16qi (__a
);
3275 __extension__
extern __inline int8x16_t
3276 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3277 __arm_vrev32q_s8 (int8x16_t __a
)
3279 return __builtin_mve_vrev32q_sv16qi (__a
);
3282 __extension__
extern __inline int16x8_t
3283 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3284 __arm_vrev32q_s16 (int16x8_t __a
)
3286 return __builtin_mve_vrev32q_sv8hi (__a
);
3289 __extension__
extern __inline int8x16_t
3290 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3291 __arm_vrev64q_s8 (int8x16_t __a
)
3293 return __builtin_mve_vrev64q_sv16qi (__a
);
3296 __extension__
extern __inline int16x8_t
3297 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3298 __arm_vrev64q_s16 (int16x8_t __a
)
3300 return __builtin_mve_vrev64q_sv8hi (__a
);
3303 __extension__
extern __inline int32x4_t
3304 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3305 __arm_vrev64q_s32 (int32x4_t __a
)
3307 return __builtin_mve_vrev64q_sv4si (__a
);
3310 __extension__
extern __inline int8x16_t
3311 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3312 __arm_vqabsq_s8 (int8x16_t __a
)
3314 return __builtin_mve_vqabsq_sv16qi (__a
);
3317 __extension__
extern __inline int16x8_t
3318 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3319 __arm_vqabsq_s16 (int16x8_t __a
)
3321 return __builtin_mve_vqabsq_sv8hi (__a
);
3324 __extension__
extern __inline int32x4_t
3325 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3326 __arm_vqabsq_s32 (int32x4_t __a
)
3328 return __builtin_mve_vqabsq_sv4si (__a
);
3331 __extension__
extern __inline int8x16_t
3332 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3333 __arm_vqnegq_s8 (int8x16_t __a
)
3335 return __builtin_mve_vqnegq_sv16qi (__a
);
3338 __extension__
extern __inline int16x8_t
3339 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3340 __arm_vqnegq_s16 (int16x8_t __a
)
3342 return __builtin_mve_vqnegq_sv8hi (__a
);
3345 __extension__
extern __inline int32x4_t
3346 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3347 __arm_vqnegq_s32 (int32x4_t __a
)
3349 return __builtin_mve_vqnegq_sv4si (__a
);
3352 __extension__
extern __inline uint8x16_t
3353 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3354 __arm_vrev64q_u8 (uint8x16_t __a
)
3356 return __builtin_mve_vrev64q_uv16qi (__a
);
3359 __extension__
extern __inline uint16x8_t
3360 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3361 __arm_vrev64q_u16 (uint16x8_t __a
)
3363 return __builtin_mve_vrev64q_uv8hi (__a
);
3366 __extension__
extern __inline uint32x4_t
3367 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3368 __arm_vrev64q_u32 (uint32x4_t __a
)
3370 return __builtin_mve_vrev64q_uv4si (__a
);
3373 __extension__
extern __inline uint8x16_t
3374 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3375 __arm_vmvnq_u8 (uint8x16_t __a
)
3377 return __builtin_mve_vmvnq_uv16qi (__a
);
3380 __extension__
extern __inline uint16x8_t
3381 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3382 __arm_vmvnq_u16 (uint16x8_t __a
)
3384 return __builtin_mve_vmvnq_uv8hi (__a
);
3387 __extension__
extern __inline uint32x4_t
3388 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3389 __arm_vmvnq_u32 (uint32x4_t __a
)
3391 return __builtin_mve_vmvnq_uv4si (__a
);
3394 __extension__
extern __inline uint8x16_t
3395 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3396 __arm_vdupq_n_u8 (uint8_t __a
)
3398 return __builtin_mve_vdupq_n_uv16qi (__a
);
3401 __extension__
extern __inline uint16x8_t
3402 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3403 __arm_vdupq_n_u16 (uint16_t __a
)
3405 return __builtin_mve_vdupq_n_uv8hi (__a
);
3408 __extension__
extern __inline uint32x4_t
3409 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3410 __arm_vdupq_n_u32 (uint32_t __a
)
3412 return __builtin_mve_vdupq_n_uv4si (__a
);
3415 __extension__
extern __inline uint8x16_t
3416 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3417 __arm_vclzq_u8 (uint8x16_t __a
)
3419 return __builtin_mve_vclzq_uv16qi (__a
);
3422 __extension__
extern __inline uint16x8_t
3423 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3424 __arm_vclzq_u16 (uint16x8_t __a
)
3426 return __builtin_mve_vclzq_uv8hi (__a
);
3429 __extension__
extern __inline uint32x4_t
3430 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3431 __arm_vclzq_u32 (uint32x4_t __a
)
3433 return __builtin_mve_vclzq_uv4si (__a
);
3436 __extension__
extern __inline
uint32_t
3437 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3438 __arm_vaddvq_u8 (uint8x16_t __a
)
3440 return __builtin_mve_vaddvq_uv16qi (__a
);
3443 __extension__
extern __inline
uint32_t
3444 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3445 __arm_vaddvq_u16 (uint16x8_t __a
)
3447 return __builtin_mve_vaddvq_uv8hi (__a
);
3450 __extension__
extern __inline
uint32_t
3451 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3452 __arm_vaddvq_u32 (uint32x4_t __a
)
3454 return __builtin_mve_vaddvq_uv4si (__a
);
3457 __extension__
extern __inline uint8x16_t
3458 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3459 __arm_vrev32q_u8 (uint8x16_t __a
)
3461 return __builtin_mve_vrev32q_uv16qi (__a
);
3464 __extension__
extern __inline uint16x8_t
3465 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3466 __arm_vrev32q_u16 (uint16x8_t __a
)
3468 return __builtin_mve_vrev32q_uv8hi (__a
);
3471 __extension__
extern __inline uint16x8_t
3472 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3473 __arm_vmovltq_u8 (uint8x16_t __a
)
3475 return __builtin_mve_vmovltq_uv16qi (__a
);
3478 __extension__
extern __inline uint32x4_t
3479 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3480 __arm_vmovltq_u16 (uint16x8_t __a
)
3482 return __builtin_mve_vmovltq_uv8hi (__a
);
3485 __extension__
extern __inline uint16x8_t
3486 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3487 __arm_vmovlbq_u8 (uint8x16_t __a
)
3489 return __builtin_mve_vmovlbq_uv16qi (__a
);
3492 __extension__
extern __inline uint32x4_t
3493 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3494 __arm_vmovlbq_u16 (uint16x8_t __a
)
3496 return __builtin_mve_vmovlbq_uv8hi (__a
);
3499 __extension__
extern __inline uint16x8_t
3500 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3501 __arm_vmvnq_n_u16 (const int __imm
)
3503 return __builtin_mve_vmvnq_n_uv8hi (__imm
);
3506 __extension__
extern __inline uint32x4_t
3507 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3508 __arm_vmvnq_n_u32 (const int __imm
)
3510 return __builtin_mve_vmvnq_n_uv4si (__imm
);
3513 __extension__
extern __inline uint8x16_t
3514 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3515 __arm_vrev16q_u8 (uint8x16_t __a
)
3517 return __builtin_mve_vrev16q_uv16qi (__a
);
3520 __extension__
extern __inline
uint64_t
3521 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3522 __arm_vaddlvq_u32 (uint32x4_t __a
)
3524 return __builtin_mve_vaddlvq_uv4si (__a
);
3527 __extension__
extern __inline mve_pred16_t
3528 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3529 __arm_vctp16q (uint32_t __a
)
3531 return __builtin_mve_vctp16qhi (__a
);
3534 __extension__
extern __inline mve_pred16_t
3535 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3536 __arm_vctp32q (uint32_t __a
)
3538 return __builtin_mve_vctp32qhi (__a
);
3541 __extension__
extern __inline mve_pred16_t
3542 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3543 __arm_vctp64q (uint32_t __a
)
3545 return __builtin_mve_vctp64qhi (__a
);
3548 __extension__
extern __inline mve_pred16_t
3549 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3550 __arm_vctp8q (uint32_t __a
)
3552 return __builtin_mve_vctp8qhi (__a
);
3555 __extension__
extern __inline mve_pred16_t
3556 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3557 __arm_vpnot (mve_pred16_t __a
)
3559 return __builtin_mve_vpnothi (__a
);
3562 __extension__
extern __inline uint8x16_t
3563 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3564 __arm_vcreateq_u8 (uint64_t __a
, uint64_t __b
)
3566 return __builtin_mve_vcreateq_uv16qi (__a
, __b
);
3569 __extension__
extern __inline uint16x8_t
3570 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3571 __arm_vcreateq_u16 (uint64_t __a
, uint64_t __b
)
3573 return __builtin_mve_vcreateq_uv8hi (__a
, __b
);
3576 __extension__
extern __inline uint32x4_t
3577 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3578 __arm_vcreateq_u32 (uint64_t __a
, uint64_t __b
)
3580 return __builtin_mve_vcreateq_uv4si (__a
, __b
);
3583 __extension__
extern __inline uint64x2_t
3584 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3585 __arm_vcreateq_u64 (uint64_t __a
, uint64_t __b
)
3587 return __builtin_mve_vcreateq_uv2di (__a
, __b
);
3590 __extension__
extern __inline int8x16_t
3591 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3592 __arm_vcreateq_s8 (uint64_t __a
, uint64_t __b
)
3594 return __builtin_mve_vcreateq_sv16qi (__a
, __b
);
3597 __extension__
extern __inline int16x8_t
3598 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3599 __arm_vcreateq_s16 (uint64_t __a
, uint64_t __b
)
3601 return __builtin_mve_vcreateq_sv8hi (__a
, __b
);
3604 __extension__
extern __inline int32x4_t
3605 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3606 __arm_vcreateq_s32 (uint64_t __a
, uint64_t __b
)
3608 return __builtin_mve_vcreateq_sv4si (__a
, __b
);
3611 __extension__
extern __inline int64x2_t
3612 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3613 __arm_vcreateq_s64 (uint64_t __a
, uint64_t __b
)
3615 return __builtin_mve_vcreateq_sv2di (__a
, __b
);
3618 __extension__
extern __inline int8x16_t
3619 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3620 __arm_vshrq_n_s8 (int8x16_t __a
, const int __imm
)
3622 return __builtin_mve_vshrq_n_sv16qi (__a
, __imm
);
3625 __extension__
extern __inline int16x8_t
3626 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3627 __arm_vshrq_n_s16 (int16x8_t __a
, const int __imm
)
3629 return __builtin_mve_vshrq_n_sv8hi (__a
, __imm
);
3632 __extension__
extern __inline int32x4_t
3633 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3634 __arm_vshrq_n_s32 (int32x4_t __a
, const int __imm
)
3636 return __builtin_mve_vshrq_n_sv4si (__a
, __imm
);
3639 __extension__
extern __inline uint8x16_t
3640 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3641 __arm_vshrq_n_u8 (uint8x16_t __a
, const int __imm
)
3643 return __builtin_mve_vshrq_n_uv16qi (__a
, __imm
);
3646 __extension__
extern __inline uint16x8_t
3647 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3648 __arm_vshrq_n_u16 (uint16x8_t __a
, const int __imm
)
3650 return __builtin_mve_vshrq_n_uv8hi (__a
, __imm
);
3653 __extension__
extern __inline uint32x4_t
3654 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3655 __arm_vshrq_n_u32 (uint32x4_t __a
, const int __imm
)
3657 return __builtin_mve_vshrq_n_uv4si (__a
, __imm
);
3659 __extension__
extern __inline
int64_t
3660 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3661 __arm_vaddlvq_p_s32 (int32x4_t __a
, mve_pred16_t __p
)
3663 return __builtin_mve_vaddlvq_p_sv4si (__a
, __p
);
3666 __extension__
extern __inline
uint64_t
3667 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3668 __arm_vaddlvq_p_u32 (uint32x4_t __a
, mve_pred16_t __p
)
3670 return __builtin_mve_vaddlvq_p_uv4si (__a
, __p
);
3673 __extension__
extern __inline mve_pred16_t
3674 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3675 __arm_vcmpneq_s8 (int8x16_t __a
, int8x16_t __b
)
3677 return __builtin_mve_vcmpneq_v16qi (__a
, __b
);
3680 __extension__
extern __inline mve_pred16_t
3681 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3682 __arm_vcmpneq_s16 (int16x8_t __a
, int16x8_t __b
)
3684 return __builtin_mve_vcmpneq_v8hi (__a
, __b
);
3687 __extension__
extern __inline mve_pred16_t
3688 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3689 __arm_vcmpneq_s32 (int32x4_t __a
, int32x4_t __b
)
3691 return __builtin_mve_vcmpneq_v4si (__a
, __b
);
3694 __extension__
extern __inline mve_pred16_t
3695 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3696 __arm_vcmpneq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3698 return __builtin_mve_vcmpneq_v16qi ((int8x16_t
)__a
, (int8x16_t
)__b
);
3701 __extension__
extern __inline mve_pred16_t
3702 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3703 __arm_vcmpneq_u16 (uint16x8_t __a
, uint16x8_t __b
)
3705 return __builtin_mve_vcmpneq_v8hi ((int16x8_t
)__a
, (int16x8_t
)__b
);
3708 __extension__
extern __inline mve_pred16_t
3709 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3710 __arm_vcmpneq_u32 (uint32x4_t __a
, uint32x4_t __b
)
3712 return __builtin_mve_vcmpneq_v4si ((int32x4_t
)__a
, (int32x4_t
)__b
);
3715 __extension__
extern __inline int8x16_t
3716 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3717 __arm_vshlq_s8 (int8x16_t __a
, int8x16_t __b
)
3719 return __builtin_mve_vshlq_sv16qi (__a
, __b
);
3722 __extension__
extern __inline int16x8_t
3723 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3724 __arm_vshlq_s16 (int16x8_t __a
, int16x8_t __b
)
3726 return __builtin_mve_vshlq_sv8hi (__a
, __b
);
3729 __extension__
extern __inline int32x4_t
3730 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3731 __arm_vshlq_s32 (int32x4_t __a
, int32x4_t __b
)
3733 return __builtin_mve_vshlq_sv4si (__a
, __b
);
3736 __extension__
extern __inline uint8x16_t
3737 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3738 __arm_vshlq_u8 (uint8x16_t __a
, int8x16_t __b
)
3740 return __builtin_mve_vshlq_uv16qi (__a
, __b
);
3743 __extension__
extern __inline uint16x8_t
3744 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3745 __arm_vshlq_u16 (uint16x8_t __a
, int16x8_t __b
)
3747 return __builtin_mve_vshlq_uv8hi (__a
, __b
);
3750 __extension__
extern __inline uint32x4_t
3751 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3752 __arm_vshlq_u32 (uint32x4_t __a
, int32x4_t __b
)
3754 return __builtin_mve_vshlq_uv4si (__a
, __b
);
3756 __extension__
extern __inline uint8x16_t
3757 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3758 __arm_vsubq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3760 return __builtin_mve_vsubq_uv16qi (__a
, __b
);
3763 __extension__
extern __inline uint8x16_t
3764 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3765 __arm_vsubq_n_u8 (uint8x16_t __a
, uint8_t __b
)
3767 return __builtin_mve_vsubq_n_uv16qi (__a
, __b
);
3770 __extension__
extern __inline uint8x16_t
3771 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3772 __arm_vrmulhq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3774 return __builtin_mve_vrmulhq_uv16qi (__a
, __b
);
3777 __extension__
extern __inline uint8x16_t
3778 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3779 __arm_vrhaddq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3781 return __builtin_mve_vrhaddq_uv16qi (__a
, __b
);
3784 __extension__
extern __inline uint8x16_t
3785 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3786 __arm_vqsubq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3788 return __builtin_mve_vqsubq_uv16qi (__a
, __b
);
3791 __extension__
extern __inline uint8x16_t
3792 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3793 __arm_vqsubq_n_u8 (uint8x16_t __a
, uint8_t __b
)
3795 return __builtin_mve_vqsubq_n_uv16qi (__a
, __b
);
3798 __extension__
extern __inline uint8x16_t
3799 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3800 __arm_vqaddq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3802 return __builtin_mve_vqaddq_uv16qi (__a
, __b
);
3805 __extension__
extern __inline uint8x16_t
3806 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3807 __arm_vqaddq_n_u8 (uint8x16_t __a
, uint8_t __b
)
3809 return __builtin_mve_vqaddq_n_uv16qi (__a
, __b
);
3812 __extension__
extern __inline uint8x16_t
3813 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3814 __arm_vorrq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3816 return __builtin_mve_vorrq_uv16qi (__a
, __b
);
3819 __extension__
extern __inline uint8x16_t
3820 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3821 __arm_vornq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3823 return __builtin_mve_vornq_uv16qi (__a
, __b
);
3826 __extension__
extern __inline uint8x16_t
3827 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3828 __arm_vmulq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3830 return __builtin_mve_vmulq_uv16qi (__a
, __b
);
3833 __extension__
extern __inline uint8x16_t
3834 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3835 __arm_vmulq_n_u8 (uint8x16_t __a
, uint8_t __b
)
3837 return __builtin_mve_vmulq_n_uv16qi (__a
, __b
);
3840 __extension__
extern __inline uint16x8_t
3841 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3842 __arm_vmulltq_int_u8 (uint8x16_t __a
, uint8x16_t __b
)
3844 return __builtin_mve_vmulltq_int_uv16qi (__a
, __b
);
3847 __extension__
extern __inline uint16x8_t
3848 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3849 __arm_vmullbq_int_u8 (uint8x16_t __a
, uint8x16_t __b
)
3851 return __builtin_mve_vmullbq_int_uv16qi (__a
, __b
);
3854 __extension__
extern __inline uint8x16_t
3855 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3856 __arm_vmulhq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3858 return __builtin_mve_vmulhq_uv16qi (__a
, __b
);
3861 __extension__
extern __inline
uint32_t
3862 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3863 __arm_vmladavq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3865 return __builtin_mve_vmladavq_uv16qi (__a
, __b
);
3868 __extension__
extern __inline
uint8_t
3869 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3870 __arm_vminvq_u8 (uint8_t __a
, uint8x16_t __b
)
3872 return __builtin_mve_vminvq_uv16qi (__a
, __b
);
3875 __extension__
extern __inline uint8x16_t
3876 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3877 __arm_vminq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3879 return __builtin_mve_vminq_uv16qi (__a
, __b
);
3882 __extension__
extern __inline
uint8_t
3883 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3884 __arm_vmaxvq_u8 (uint8_t __a
, uint8x16_t __b
)
3886 return __builtin_mve_vmaxvq_uv16qi (__a
, __b
);
3889 __extension__
extern __inline uint8x16_t
3890 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3891 __arm_vmaxq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3893 return __builtin_mve_vmaxq_uv16qi (__a
, __b
);
3896 __extension__
extern __inline uint8x16_t
3897 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3898 __arm_vhsubq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3900 return __builtin_mve_vhsubq_uv16qi (__a
, __b
);
3903 __extension__
extern __inline uint8x16_t
3904 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3905 __arm_vhsubq_n_u8 (uint8x16_t __a
, uint8_t __b
)
3907 return __builtin_mve_vhsubq_n_uv16qi (__a
, __b
);
3910 __extension__
extern __inline uint8x16_t
3911 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3912 __arm_vhaddq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3914 return __builtin_mve_vhaddq_uv16qi (__a
, __b
);
3917 __extension__
extern __inline uint8x16_t
3918 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3919 __arm_vhaddq_n_u8 (uint8x16_t __a
, uint8_t __b
)
3921 return __builtin_mve_vhaddq_n_uv16qi (__a
, __b
);
3924 __extension__
extern __inline uint8x16_t
3925 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3926 __arm_veorq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3928 return __builtin_mve_veorq_uv16qi (__a
, __b
);
3931 __extension__
extern __inline mve_pred16_t
3932 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3933 __arm_vcmpneq_n_u8 (uint8x16_t __a
, uint8_t __b
)
3935 return __builtin_mve_vcmpneq_n_v16qi ((int8x16_t
)__a
, (int8_t)__b
);
3938 __extension__
extern __inline mve_pred16_t
3939 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3940 __arm_vcmphiq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3942 return __builtin_mve_vcmphiq_v16qi (__a
, __b
);
3945 __extension__
extern __inline mve_pred16_t
3946 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3947 __arm_vcmphiq_n_u8 (uint8x16_t __a
, uint8_t __b
)
3949 return __builtin_mve_vcmphiq_n_v16qi (__a
, __b
);
3952 __extension__
extern __inline mve_pred16_t
3953 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3954 __arm_vcmpeqq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3956 return __builtin_mve_vcmpeqq_v16qi ((int8x16_t
)__a
, (int8x16_t
)__b
);
3959 __extension__
extern __inline mve_pred16_t
3960 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3961 __arm_vcmpeqq_n_u8 (uint8x16_t __a
, uint8_t __b
)
3963 return __builtin_mve_vcmpeqq_n_v16qi ((int8x16_t
)__a
, (int8_t)__b
);
3966 __extension__
extern __inline mve_pred16_t
3967 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3968 __arm_vcmpcsq_u8 (uint8x16_t __a
, uint8x16_t __b
)
3970 return __builtin_mve_vcmpcsq_v16qi (__a
, __b
);
3973 __extension__
extern __inline mve_pred16_t
3974 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3975 __arm_vcmpcsq_n_u8 (uint8x16_t __a
, uint8_t __b
)
3977 return __builtin_mve_vcmpcsq_n_v16qi (__a
, __b
);
3980 __extension__
extern __inline uint8x16_t
3981 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3982 __arm_vcaddq_rot90_u8 (uint8x16_t __a
, uint8x16_t __b
)
3985 __builtin_mve_vcaddq_rot90v16qi ((int8x16_t
)__a
, (int8x16_t
)__b
);
3988 __extension__
extern __inline uint8x16_t
3989 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3990 __arm_vcaddq_rot270_u8 (uint8x16_t __a
, uint8x16_t __b
)
3993 __builtin_mve_vcaddq_rot270v16qi ((int8x16_t
)__a
, (int8x16_t
)__b
);
3996 __extension__
extern __inline uint8x16_t
3997 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
3998 __arm_vbicq_u8 (uint8x16_t __a
, uint8x16_t __b
)
4000 return __builtin_mve_vbicq_uv16qi (__a
, __b
);
4003 __extension__
extern __inline uint8x16_t
4004 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4005 __arm_vandq_u8 (uint8x16_t __a
, uint8x16_t __b
)
4007 return __builtin_mve_vandq_uv16qi (__a
, __b
);
4010 __extension__
extern __inline
uint32_t
4011 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4012 __arm_vaddvq_p_u8 (uint8x16_t __a
, mve_pred16_t __p
)
4014 return __builtin_mve_vaddvq_p_uv16qi (__a
, __p
);
4017 __extension__
extern __inline
uint32_t
4018 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4019 __arm_vaddvaq_u8 (uint32_t __a
, uint8x16_t __b
)
4021 return __builtin_mve_vaddvaq_uv16qi (__a
, __b
);
4024 __extension__
extern __inline uint8x16_t
4025 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4026 __arm_vaddq_n_u8 (uint8x16_t __a
, uint8_t __b
)
4028 return __builtin_mve_vaddq_n_uv16qi (__a
, __b
);
4031 __extension__
extern __inline uint8x16_t
4032 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4033 __arm_vabdq_u8 (uint8x16_t __a
, uint8x16_t __b
)
4035 return __builtin_mve_vabdq_uv16qi (__a
, __b
);
4038 __extension__
extern __inline uint8x16_t
4039 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4040 __arm_vshlq_r_u8 (uint8x16_t __a
, int32_t __b
)
4042 return __builtin_mve_vshlq_r_uv16qi (__a
, __b
);
4045 __extension__
extern __inline uint8x16_t
4046 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4047 __arm_vrshlq_u8 (uint8x16_t __a
, int8x16_t __b
)
4049 return __builtin_mve_vrshlq_uv16qi (__a
, __b
);
4052 __extension__
extern __inline uint8x16_t
4053 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4054 __arm_vrshlq_n_u8 (uint8x16_t __a
, int32_t __b
)
4056 return __builtin_mve_vrshlq_n_uv16qi (__a
, __b
);
4059 __extension__
extern __inline uint8x16_t
4060 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4061 __arm_vqshlq_u8 (uint8x16_t __a
, int8x16_t __b
)
4063 return __builtin_mve_vqshlq_uv16qi (__a
, __b
);
4066 __extension__
extern __inline uint8x16_t
4067 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4068 __arm_vqshlq_r_u8 (uint8x16_t __a
, int32_t __b
)
4070 return __builtin_mve_vqshlq_r_uv16qi (__a
, __b
);
4073 __extension__
extern __inline uint8x16_t
4074 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4075 __arm_vqrshlq_u8 (uint8x16_t __a
, int8x16_t __b
)
4077 return __builtin_mve_vqrshlq_uv16qi (__a
, __b
);
4080 __extension__
extern __inline uint8x16_t
4081 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4082 __arm_vqrshlq_n_u8 (uint8x16_t __a
, int32_t __b
)
4084 return __builtin_mve_vqrshlq_n_uv16qi (__a
, __b
);
4087 __extension__
extern __inline
uint8_t
4088 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4089 __arm_vminavq_s8 (uint8_t __a
, int8x16_t __b
)
4091 return __builtin_mve_vminavq_sv16qi (__a
, __b
);
4094 __extension__
extern __inline uint8x16_t
4095 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4096 __arm_vminaq_s8 (uint8x16_t __a
, int8x16_t __b
)
4098 return __builtin_mve_vminaq_sv16qi (__a
, __b
);
4101 __extension__
extern __inline
uint8_t
4102 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4103 __arm_vmaxavq_s8 (uint8_t __a
, int8x16_t __b
)
4105 return __builtin_mve_vmaxavq_sv16qi (__a
, __b
);
4108 __extension__
extern __inline uint8x16_t
4109 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4110 __arm_vmaxaq_s8 (uint8x16_t __a
, int8x16_t __b
)
4112 return __builtin_mve_vmaxaq_sv16qi (__a
, __b
);
4115 __extension__
extern __inline uint8x16_t
4116 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4117 __arm_vbrsrq_n_u8 (uint8x16_t __a
, int32_t __b
)
4119 return __builtin_mve_vbrsrq_n_uv16qi (__a
, __b
);
4122 __extension__
extern __inline uint8x16_t
4123 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4124 __arm_vshlq_n_u8 (uint8x16_t __a
, const int __imm
)
4126 return __builtin_mve_vshlq_n_uv16qi (__a
, __imm
);
4129 __extension__
extern __inline uint8x16_t
4130 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4131 __arm_vrshrq_n_u8 (uint8x16_t __a
, const int __imm
)
4133 return __builtin_mve_vrshrq_n_uv16qi (__a
, __imm
);
4136 __extension__
extern __inline uint8x16_t
4137 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4138 __arm_vqshlq_n_u8 (uint8x16_t __a
, const int __imm
)
4140 return __builtin_mve_vqshlq_n_uv16qi (__a
, __imm
);
4143 __extension__
extern __inline mve_pred16_t
4144 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4145 __arm_vcmpneq_n_s8 (int8x16_t __a
, int8_t __b
)
4147 return __builtin_mve_vcmpneq_n_v16qi (__a
, __b
);
4150 __extension__
extern __inline mve_pred16_t
4151 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4152 __arm_vcmpltq_s8 (int8x16_t __a
, int8x16_t __b
)
4154 return __builtin_mve_vcmpltq_v16qi (__a
, __b
);
4157 __extension__
extern __inline mve_pred16_t
4158 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4159 __arm_vcmpltq_n_s8 (int8x16_t __a
, int8_t __b
)
4161 return __builtin_mve_vcmpltq_n_v16qi (__a
, __b
);
4164 __extension__
extern __inline mve_pred16_t
4165 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4166 __arm_vcmpleq_s8 (int8x16_t __a
, int8x16_t __b
)
4168 return __builtin_mve_vcmpleq_v16qi (__a
, __b
);
4171 __extension__
extern __inline mve_pred16_t
4172 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4173 __arm_vcmpleq_n_s8 (int8x16_t __a
, int8_t __b
)
4175 return __builtin_mve_vcmpleq_n_v16qi (__a
, __b
);
4178 __extension__
extern __inline mve_pred16_t
4179 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4180 __arm_vcmpgtq_s8 (int8x16_t __a
, int8x16_t __b
)
4182 return __builtin_mve_vcmpgtq_v16qi (__a
, __b
);
4185 __extension__
extern __inline mve_pred16_t
4186 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4187 __arm_vcmpgtq_n_s8 (int8x16_t __a
, int8_t __b
)
4189 return __builtin_mve_vcmpgtq_n_v16qi (__a
, __b
);
4192 __extension__
extern __inline mve_pred16_t
4193 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4194 __arm_vcmpgeq_s8 (int8x16_t __a
, int8x16_t __b
)
4196 return __builtin_mve_vcmpgeq_v16qi (__a
, __b
);
4199 __extension__
extern __inline mve_pred16_t
4200 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4201 __arm_vcmpgeq_n_s8 (int8x16_t __a
, int8_t __b
)
4203 return __builtin_mve_vcmpgeq_n_v16qi (__a
, __b
);
4206 __extension__
extern __inline mve_pred16_t
4207 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4208 __arm_vcmpeqq_s8 (int8x16_t __a
, int8x16_t __b
)
4210 return __builtin_mve_vcmpeqq_v16qi (__a
, __b
);
4213 __extension__
extern __inline mve_pred16_t
4214 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4215 __arm_vcmpeqq_n_s8 (int8x16_t __a
, int8_t __b
)
4217 return __builtin_mve_vcmpeqq_n_v16qi (__a
, __b
);
4220 __extension__
extern __inline uint8x16_t
4221 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4222 __arm_vqshluq_n_s8 (int8x16_t __a
, const int __imm
)
4224 return __builtin_mve_vqshluq_n_sv16qi (__a
, __imm
);
4227 __extension__
extern __inline
int32_t
4228 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4229 __arm_vaddvq_p_s8 (int8x16_t __a
, mve_pred16_t __p
)
4231 return __builtin_mve_vaddvq_p_sv16qi (__a
, __p
);
4234 __extension__
extern __inline int8x16_t
4235 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4236 __arm_vsubq_s8 (int8x16_t __a
, int8x16_t __b
)
4238 return __builtin_mve_vsubq_sv16qi (__a
, __b
);
4241 __extension__
extern __inline int8x16_t
4242 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4243 __arm_vsubq_n_s8 (int8x16_t __a
, int8_t __b
)
4245 return __builtin_mve_vsubq_n_sv16qi (__a
, __b
);
4248 __extension__
extern __inline int8x16_t
4249 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4250 __arm_vshlq_r_s8 (int8x16_t __a
, int32_t __b
)
4252 return __builtin_mve_vshlq_r_sv16qi (__a
, __b
);
4255 __extension__
extern __inline int8x16_t
4256 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4257 __arm_vrshlq_s8 (int8x16_t __a
, int8x16_t __b
)
4259 return __builtin_mve_vrshlq_sv16qi (__a
, __b
);
4262 __extension__
extern __inline int8x16_t
4263 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4264 __arm_vrshlq_n_s8 (int8x16_t __a
, int32_t __b
)
4266 return __builtin_mve_vrshlq_n_sv16qi (__a
, __b
);
4269 __extension__
extern __inline int8x16_t
4270 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4271 __arm_vrmulhq_s8 (int8x16_t __a
, int8x16_t __b
)
4273 return __builtin_mve_vrmulhq_sv16qi (__a
, __b
);
4276 __extension__
extern __inline int8x16_t
4277 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4278 __arm_vrhaddq_s8 (int8x16_t __a
, int8x16_t __b
)
4280 return __builtin_mve_vrhaddq_sv16qi (__a
, __b
);
4283 __extension__
extern __inline int8x16_t
4284 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4285 __arm_vqsubq_s8 (int8x16_t __a
, int8x16_t __b
)
4287 return __builtin_mve_vqsubq_sv16qi (__a
, __b
);
4290 __extension__
extern __inline int8x16_t
4291 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4292 __arm_vqsubq_n_s8 (int8x16_t __a
, int8_t __b
)
4294 return __builtin_mve_vqsubq_n_sv16qi (__a
, __b
);
4297 __extension__
extern __inline int8x16_t
4298 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4299 __arm_vqshlq_s8 (int8x16_t __a
, int8x16_t __b
)
4301 return __builtin_mve_vqshlq_sv16qi (__a
, __b
);
4304 __extension__
extern __inline int8x16_t
4305 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4306 __arm_vqshlq_r_s8 (int8x16_t __a
, int32_t __b
)
4308 return __builtin_mve_vqshlq_r_sv16qi (__a
, __b
);
4311 __extension__
extern __inline int8x16_t
4312 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4313 __arm_vqrshlq_s8 (int8x16_t __a
, int8x16_t __b
)
4315 return __builtin_mve_vqrshlq_sv16qi (__a
, __b
);
4318 __extension__
extern __inline int8x16_t
4319 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4320 __arm_vqrshlq_n_s8 (int8x16_t __a
, int32_t __b
)
4322 return __builtin_mve_vqrshlq_n_sv16qi (__a
, __b
);
4325 __extension__
extern __inline int8x16_t
4326 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4327 __arm_vqrdmulhq_s8 (int8x16_t __a
, int8x16_t __b
)
4329 return __builtin_mve_vqrdmulhq_sv16qi (__a
, __b
);
4332 __extension__
extern __inline int8x16_t
4333 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4334 __arm_vqrdmulhq_n_s8 (int8x16_t __a
, int8_t __b
)
4336 return __builtin_mve_vqrdmulhq_n_sv16qi (__a
, __b
);
4339 __extension__
extern __inline int8x16_t
4340 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4341 __arm_vqdmulhq_s8 (int8x16_t __a
, int8x16_t __b
)
4343 return __builtin_mve_vqdmulhq_sv16qi (__a
, __b
);
4346 __extension__
extern __inline int8x16_t
4347 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4348 __arm_vqdmulhq_n_s8 (int8x16_t __a
, int8_t __b
)
4350 return __builtin_mve_vqdmulhq_n_sv16qi (__a
, __b
);
4353 __extension__
extern __inline int8x16_t
4354 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4355 __arm_vqaddq_s8 (int8x16_t __a
, int8x16_t __b
)
4357 return __builtin_mve_vqaddq_sv16qi (__a
, __b
);
4360 __extension__
extern __inline int8x16_t
4361 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4362 __arm_vqaddq_n_s8 (int8x16_t __a
, int8_t __b
)
4364 return __builtin_mve_vqaddq_n_sv16qi (__a
, __b
);
4367 __extension__
extern __inline int8x16_t
4368 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4369 __arm_vorrq_s8 (int8x16_t __a
, int8x16_t __b
)
4371 return __builtin_mve_vorrq_sv16qi (__a
, __b
);
4374 __extension__
extern __inline int8x16_t
4375 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4376 __arm_vornq_s8 (int8x16_t __a
, int8x16_t __b
)
4378 return __builtin_mve_vornq_sv16qi (__a
, __b
);
4381 __extension__
extern __inline int8x16_t
4382 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4383 __arm_vmulq_s8 (int8x16_t __a
, int8x16_t __b
)
4385 return __builtin_mve_vmulq_sv16qi (__a
, __b
);
4388 __extension__
extern __inline int8x16_t
4389 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4390 __arm_vmulq_n_s8 (int8x16_t __a
, int8_t __b
)
4392 return __builtin_mve_vmulq_n_sv16qi (__a
, __b
);
4395 __extension__
extern __inline int16x8_t
4396 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4397 __arm_vmulltq_int_s8 (int8x16_t __a
, int8x16_t __b
)
4399 return __builtin_mve_vmulltq_int_sv16qi (__a
, __b
);
4402 __extension__
extern __inline int16x8_t
4403 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4404 __arm_vmullbq_int_s8 (int8x16_t __a
, int8x16_t __b
)
4406 return __builtin_mve_vmullbq_int_sv16qi (__a
, __b
);
4409 __extension__
extern __inline int8x16_t
4410 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4411 __arm_vmulhq_s8 (int8x16_t __a
, int8x16_t __b
)
4413 return __builtin_mve_vmulhq_sv16qi (__a
, __b
);
4416 __extension__
extern __inline
int32_t
4417 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4418 __arm_vmlsdavxq_s8 (int8x16_t __a
, int8x16_t __b
)
4420 return __builtin_mve_vmlsdavxq_sv16qi (__a
, __b
);
4423 __extension__
extern __inline
int32_t
4424 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4425 __arm_vmlsdavq_s8 (int8x16_t __a
, int8x16_t __b
)
4427 return __builtin_mve_vmlsdavq_sv16qi (__a
, __b
);
4430 __extension__
extern __inline
int32_t
4431 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4432 __arm_vmladavxq_s8 (int8x16_t __a
, int8x16_t __b
)
4434 return __builtin_mve_vmladavxq_sv16qi (__a
, __b
);
4437 __extension__
extern __inline
int32_t
4438 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4439 __arm_vmladavq_s8 (int8x16_t __a
, int8x16_t __b
)
4441 return __builtin_mve_vmladavq_sv16qi (__a
, __b
);
4444 __extension__
extern __inline
int8_t
4445 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4446 __arm_vminvq_s8 (int8_t __a
, int8x16_t __b
)
4448 return __builtin_mve_vminvq_sv16qi (__a
, __b
);
4451 __extension__
extern __inline int8x16_t
4452 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4453 __arm_vminq_s8 (int8x16_t __a
, int8x16_t __b
)
4455 return __builtin_mve_vminq_sv16qi (__a
, __b
);
4458 __extension__
extern __inline
int8_t
4459 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4460 __arm_vmaxvq_s8 (int8_t __a
, int8x16_t __b
)
4462 return __builtin_mve_vmaxvq_sv16qi (__a
, __b
);
4465 __extension__
extern __inline int8x16_t
4466 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4467 __arm_vmaxq_s8 (int8x16_t __a
, int8x16_t __b
)
4469 return __builtin_mve_vmaxq_sv16qi (__a
, __b
);
4472 __extension__
extern __inline int8x16_t
4473 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4474 __arm_vhsubq_s8 (int8x16_t __a
, int8x16_t __b
)
4476 return __builtin_mve_vhsubq_sv16qi (__a
, __b
);
4479 __extension__
extern __inline int8x16_t
4480 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4481 __arm_vhsubq_n_s8 (int8x16_t __a
, int8_t __b
)
4483 return __builtin_mve_vhsubq_n_sv16qi (__a
, __b
);
4486 __extension__
extern __inline int8x16_t
4487 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4488 __arm_vhcaddq_rot90_s8 (int8x16_t __a
, int8x16_t __b
)
4490 return __builtin_mve_vhcaddq_rot90_sv16qi (__a
, __b
);
4493 __extension__
extern __inline int8x16_t
4494 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4495 __arm_vhcaddq_rot270_s8 (int8x16_t __a
, int8x16_t __b
)
4497 return __builtin_mve_vhcaddq_rot270_sv16qi (__a
, __b
);
4500 __extension__
extern __inline int8x16_t
4501 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4502 __arm_vhaddq_s8 (int8x16_t __a
, int8x16_t __b
)
4504 return __builtin_mve_vhaddq_sv16qi (__a
, __b
);
4507 __extension__
extern __inline int8x16_t
4508 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4509 __arm_vhaddq_n_s8 (int8x16_t __a
, int8_t __b
)
4511 return __builtin_mve_vhaddq_n_sv16qi (__a
, __b
);
4514 __extension__
extern __inline int8x16_t
4515 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4516 __arm_veorq_s8 (int8x16_t __a
, int8x16_t __b
)
4518 return __builtin_mve_veorq_sv16qi (__a
, __b
);
4521 __extension__
extern __inline int8x16_t
4522 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4523 __arm_vcaddq_rot90_s8 (int8x16_t __a
, int8x16_t __b
)
4525 return __builtin_mve_vcaddq_rot90v16qi (__a
, __b
);
4528 __extension__
extern __inline int8x16_t
4529 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4530 __arm_vcaddq_rot270_s8 (int8x16_t __a
, int8x16_t __b
)
4532 return __builtin_mve_vcaddq_rot270v16qi (__a
, __b
);
4535 __extension__
extern __inline int8x16_t
4536 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4537 __arm_vbrsrq_n_s8 (int8x16_t __a
, int32_t __b
)
4539 return __builtin_mve_vbrsrq_n_sv16qi (__a
, __b
);
4542 __extension__
extern __inline int8x16_t
4543 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4544 __arm_vbicq_s8 (int8x16_t __a
, int8x16_t __b
)
4546 return __builtin_mve_vbicq_sv16qi (__a
, __b
);
4549 __extension__
extern __inline int8x16_t
4550 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4551 __arm_vandq_s8 (int8x16_t __a
, int8x16_t __b
)
4553 return __builtin_mve_vandq_sv16qi (__a
, __b
);
4556 __extension__
extern __inline
int32_t
4557 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4558 __arm_vaddvaq_s8 (int32_t __a
, int8x16_t __b
)
4560 return __builtin_mve_vaddvaq_sv16qi (__a
, __b
);
4563 __extension__
extern __inline int8x16_t
4564 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4565 __arm_vaddq_n_s8 (int8x16_t __a
, int8_t __b
)
4567 return __builtin_mve_vaddq_n_sv16qi (__a
, __b
);
4570 __extension__
extern __inline int8x16_t
4571 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4572 __arm_vabdq_s8 (int8x16_t __a
, int8x16_t __b
)
4574 return __builtin_mve_vabdq_sv16qi (__a
, __b
);
4577 __extension__
extern __inline int8x16_t
4578 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4579 __arm_vshlq_n_s8 (int8x16_t __a
, const int __imm
)
4581 return __builtin_mve_vshlq_n_sv16qi (__a
, __imm
);
4584 __extension__
extern __inline int8x16_t
4585 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4586 __arm_vrshrq_n_s8 (int8x16_t __a
, const int __imm
)
4588 return __builtin_mve_vrshrq_n_sv16qi (__a
, __imm
);
4591 __extension__
extern __inline int8x16_t
4592 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4593 __arm_vqshlq_n_s8 (int8x16_t __a
, const int __imm
)
4595 return __builtin_mve_vqshlq_n_sv16qi (__a
, __imm
);
4598 __extension__
extern __inline uint16x8_t
4599 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4600 __arm_vsubq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4602 return __builtin_mve_vsubq_uv8hi (__a
, __b
);
4605 __extension__
extern __inline uint16x8_t
4606 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4607 __arm_vsubq_n_u16 (uint16x8_t __a
, uint16_t __b
)
4609 return __builtin_mve_vsubq_n_uv8hi (__a
, __b
);
4612 __extension__
extern __inline uint16x8_t
4613 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4614 __arm_vrmulhq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4616 return __builtin_mve_vrmulhq_uv8hi (__a
, __b
);
4619 __extension__
extern __inline uint16x8_t
4620 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4621 __arm_vrhaddq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4623 return __builtin_mve_vrhaddq_uv8hi (__a
, __b
);
4626 __extension__
extern __inline uint16x8_t
4627 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4628 __arm_vqsubq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4630 return __builtin_mve_vqsubq_uv8hi (__a
, __b
);
4633 __extension__
extern __inline uint16x8_t
4634 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4635 __arm_vqsubq_n_u16 (uint16x8_t __a
, uint16_t __b
)
4637 return __builtin_mve_vqsubq_n_uv8hi (__a
, __b
);
4640 __extension__
extern __inline uint16x8_t
4641 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4642 __arm_vqaddq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4644 return __builtin_mve_vqaddq_uv8hi (__a
, __b
);
4647 __extension__
extern __inline uint16x8_t
4648 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4649 __arm_vqaddq_n_u16 (uint16x8_t __a
, uint16_t __b
)
4651 return __builtin_mve_vqaddq_n_uv8hi (__a
, __b
);
4654 __extension__
extern __inline uint16x8_t
4655 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4656 __arm_vorrq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4658 return __builtin_mve_vorrq_uv8hi (__a
, __b
);
4661 __extension__
extern __inline uint16x8_t
4662 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4663 __arm_vornq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4665 return __builtin_mve_vornq_uv8hi (__a
, __b
);
4668 __extension__
extern __inline uint16x8_t
4669 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4670 __arm_vmulq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4672 return __builtin_mve_vmulq_uv8hi (__a
, __b
);
4675 __extension__
extern __inline uint16x8_t
4676 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4677 __arm_vmulq_n_u16 (uint16x8_t __a
, uint16_t __b
)
4679 return __builtin_mve_vmulq_n_uv8hi (__a
, __b
);
4682 __extension__
extern __inline uint32x4_t
4683 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4684 __arm_vmulltq_int_u16 (uint16x8_t __a
, uint16x8_t __b
)
4686 return __builtin_mve_vmulltq_int_uv8hi (__a
, __b
);
4689 __extension__
extern __inline uint32x4_t
4690 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4691 __arm_vmullbq_int_u16 (uint16x8_t __a
, uint16x8_t __b
)
4693 return __builtin_mve_vmullbq_int_uv8hi (__a
, __b
);
4696 __extension__
extern __inline uint16x8_t
4697 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4698 __arm_vmulhq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4700 return __builtin_mve_vmulhq_uv8hi (__a
, __b
);
4703 __extension__
extern __inline
uint32_t
4704 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4705 __arm_vmladavq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4707 return __builtin_mve_vmladavq_uv8hi (__a
, __b
);
4710 __extension__
extern __inline
uint16_t
4711 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4712 __arm_vminvq_u16 (uint16_t __a
, uint16x8_t __b
)
4714 return __builtin_mve_vminvq_uv8hi (__a
, __b
);
4717 __extension__
extern __inline uint16x8_t
4718 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4719 __arm_vminq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4721 return __builtin_mve_vminq_uv8hi (__a
, __b
);
4724 __extension__
extern __inline
uint16_t
4725 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4726 __arm_vmaxvq_u16 (uint16_t __a
, uint16x8_t __b
)
4728 return __builtin_mve_vmaxvq_uv8hi (__a
, __b
);
4731 __extension__
extern __inline uint16x8_t
4732 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4733 __arm_vmaxq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4735 return __builtin_mve_vmaxq_uv8hi (__a
, __b
);
4738 __extension__
extern __inline uint16x8_t
4739 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4740 __arm_vhsubq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4742 return __builtin_mve_vhsubq_uv8hi (__a
, __b
);
4745 __extension__
extern __inline uint16x8_t
4746 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4747 __arm_vhsubq_n_u16 (uint16x8_t __a
, uint16_t __b
)
4749 return __builtin_mve_vhsubq_n_uv8hi (__a
, __b
);
4752 __extension__
extern __inline uint16x8_t
4753 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4754 __arm_vhaddq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4756 return __builtin_mve_vhaddq_uv8hi (__a
, __b
);
4759 __extension__
extern __inline uint16x8_t
4760 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4761 __arm_vhaddq_n_u16 (uint16x8_t __a
, uint16_t __b
)
4763 return __builtin_mve_vhaddq_n_uv8hi (__a
, __b
);
4766 __extension__
extern __inline uint16x8_t
4767 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4768 __arm_veorq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4770 return __builtin_mve_veorq_uv8hi (__a
, __b
);
4773 __extension__
extern __inline mve_pred16_t
4774 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4775 __arm_vcmpneq_n_u16 (uint16x8_t __a
, uint16_t __b
)
4777 return __builtin_mve_vcmpneq_n_v8hi ((int16x8_t
)__a
, (int16_t)__b
);
4780 __extension__
extern __inline mve_pred16_t
4781 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4782 __arm_vcmphiq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4784 return __builtin_mve_vcmphiq_v8hi (__a
, __b
);
4787 __extension__
extern __inline mve_pred16_t
4788 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4789 __arm_vcmphiq_n_u16 (uint16x8_t __a
, uint16_t __b
)
4791 return __builtin_mve_vcmphiq_n_v8hi (__a
, __b
);
4794 __extension__
extern __inline mve_pred16_t
4795 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4796 __arm_vcmpeqq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4798 return __builtin_mve_vcmpeqq_v8hi ((int16x8_t
)__a
, (int16x8_t
)__b
);
4801 __extension__
extern __inline mve_pred16_t
4802 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4803 __arm_vcmpeqq_n_u16 (uint16x8_t __a
, uint16_t __b
)
4805 return __builtin_mve_vcmpeqq_n_v8hi ((int16x8_t
)__a
, (int16_t)__b
);
4808 __extension__
extern __inline mve_pred16_t
4809 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4810 __arm_vcmpcsq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4812 return __builtin_mve_vcmpcsq_v8hi (__a
, __b
);
4815 __extension__
extern __inline mve_pred16_t
4816 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4817 __arm_vcmpcsq_n_u16 (uint16x8_t __a
, uint16_t __b
)
4819 return __builtin_mve_vcmpcsq_n_v8hi (__a
, __b
);
4822 __extension__
extern __inline uint16x8_t
4823 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4824 __arm_vcaddq_rot90_u16 (uint16x8_t __a
, uint16x8_t __b
)
4827 __builtin_mve_vcaddq_rot90v8hi ((int16x8_t
)__a
, (int16x8_t
)__b
);
4830 __extension__
extern __inline uint16x8_t
4831 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4832 __arm_vcaddq_rot270_u16 (uint16x8_t __a
, uint16x8_t __b
)
4835 __builtin_mve_vcaddq_rot270v8hi ((int16x8_t
)__a
, (int16x8_t
)__b
);
4838 __extension__
extern __inline uint16x8_t
4839 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4840 __arm_vbicq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4842 return __builtin_mve_vbicq_uv8hi (__a
, __b
);
4845 __extension__
extern __inline uint16x8_t
4846 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4847 __arm_vandq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4849 return __builtin_mve_vandq_uv8hi (__a
, __b
);
4852 __extension__
extern __inline
uint32_t
4853 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4854 __arm_vaddvq_p_u16 (uint16x8_t __a
, mve_pred16_t __p
)
4856 return __builtin_mve_vaddvq_p_uv8hi (__a
, __p
);
4859 __extension__
extern __inline
uint32_t
4860 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4861 __arm_vaddvaq_u16 (uint32_t __a
, uint16x8_t __b
)
4863 return __builtin_mve_vaddvaq_uv8hi (__a
, __b
);
4866 __extension__
extern __inline uint16x8_t
4867 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4868 __arm_vaddq_n_u16 (uint16x8_t __a
, uint16_t __b
)
4870 return __builtin_mve_vaddq_n_uv8hi (__a
, __b
);
4873 __extension__
extern __inline uint16x8_t
4874 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4875 __arm_vabdq_u16 (uint16x8_t __a
, uint16x8_t __b
)
4877 return __builtin_mve_vabdq_uv8hi (__a
, __b
);
4880 __extension__
extern __inline uint16x8_t
4881 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4882 __arm_vshlq_r_u16 (uint16x8_t __a
, int32_t __b
)
4884 return __builtin_mve_vshlq_r_uv8hi (__a
, __b
);
4887 __extension__
extern __inline uint16x8_t
4888 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4889 __arm_vrshlq_u16 (uint16x8_t __a
, int16x8_t __b
)
4891 return __builtin_mve_vrshlq_uv8hi (__a
, __b
);
4894 __extension__
extern __inline uint16x8_t
4895 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4896 __arm_vrshlq_n_u16 (uint16x8_t __a
, int32_t __b
)
4898 return __builtin_mve_vrshlq_n_uv8hi (__a
, __b
);
4901 __extension__
extern __inline uint16x8_t
4902 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4903 __arm_vqshlq_u16 (uint16x8_t __a
, int16x8_t __b
)
4905 return __builtin_mve_vqshlq_uv8hi (__a
, __b
);
4908 __extension__
extern __inline uint16x8_t
4909 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4910 __arm_vqshlq_r_u16 (uint16x8_t __a
, int32_t __b
)
4912 return __builtin_mve_vqshlq_r_uv8hi (__a
, __b
);
4915 __extension__
extern __inline uint16x8_t
4916 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4917 __arm_vqrshlq_u16 (uint16x8_t __a
, int16x8_t __b
)
4919 return __builtin_mve_vqrshlq_uv8hi (__a
, __b
);
4922 __extension__
extern __inline uint16x8_t
4923 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4924 __arm_vqrshlq_n_u16 (uint16x8_t __a
, int32_t __b
)
4926 return __builtin_mve_vqrshlq_n_uv8hi (__a
, __b
);
4929 __extension__
extern __inline
uint16_t
4930 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4931 __arm_vminavq_s16 (uint16_t __a
, int16x8_t __b
)
4933 return __builtin_mve_vminavq_sv8hi (__a
, __b
);
4936 __extension__
extern __inline uint16x8_t
4937 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4938 __arm_vminaq_s16 (uint16x8_t __a
, int16x8_t __b
)
4940 return __builtin_mve_vminaq_sv8hi (__a
, __b
);
4943 __extension__
extern __inline
uint16_t
4944 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4945 __arm_vmaxavq_s16 (uint16_t __a
, int16x8_t __b
)
4947 return __builtin_mve_vmaxavq_sv8hi (__a
, __b
);
4950 __extension__
extern __inline uint16x8_t
4951 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4952 __arm_vmaxaq_s16 (uint16x8_t __a
, int16x8_t __b
)
4954 return __builtin_mve_vmaxaq_sv8hi (__a
, __b
);
4957 __extension__
extern __inline uint16x8_t
4958 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4959 __arm_vbrsrq_n_u16 (uint16x8_t __a
, int32_t __b
)
4961 return __builtin_mve_vbrsrq_n_uv8hi (__a
, __b
);
4964 __extension__
extern __inline uint16x8_t
4965 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4966 __arm_vshlq_n_u16 (uint16x8_t __a
, const int __imm
)
4968 return __builtin_mve_vshlq_n_uv8hi (__a
, __imm
);
4971 __extension__
extern __inline uint16x8_t
4972 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4973 __arm_vrshrq_n_u16 (uint16x8_t __a
, const int __imm
)
4975 return __builtin_mve_vrshrq_n_uv8hi (__a
, __imm
);
4978 __extension__
extern __inline uint16x8_t
4979 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4980 __arm_vqshlq_n_u16 (uint16x8_t __a
, const int __imm
)
4982 return __builtin_mve_vqshlq_n_uv8hi (__a
, __imm
);
4985 __extension__
extern __inline mve_pred16_t
4986 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4987 __arm_vcmpneq_n_s16 (int16x8_t __a
, int16_t __b
)
4989 return __builtin_mve_vcmpneq_n_v8hi (__a
, __b
);
4992 __extension__
extern __inline mve_pred16_t
4993 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
4994 __arm_vcmpltq_s16 (int16x8_t __a
, int16x8_t __b
)
4996 return __builtin_mve_vcmpltq_v8hi (__a
, __b
);
4999 __extension__
extern __inline mve_pred16_t
5000 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5001 __arm_vcmpltq_n_s16 (int16x8_t __a
, int16_t __b
)
5003 return __builtin_mve_vcmpltq_n_v8hi (__a
, __b
);
5006 __extension__
extern __inline mve_pred16_t
5007 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5008 __arm_vcmpleq_s16 (int16x8_t __a
, int16x8_t __b
)
5010 return __builtin_mve_vcmpleq_v8hi (__a
, __b
);
5013 __extension__
extern __inline mve_pred16_t
5014 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5015 __arm_vcmpleq_n_s16 (int16x8_t __a
, int16_t __b
)
5017 return __builtin_mve_vcmpleq_n_v8hi (__a
, __b
);
5020 __extension__
extern __inline mve_pred16_t
5021 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5022 __arm_vcmpgtq_s16 (int16x8_t __a
, int16x8_t __b
)
5024 return __builtin_mve_vcmpgtq_v8hi (__a
, __b
);
5027 __extension__
extern __inline mve_pred16_t
5028 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5029 __arm_vcmpgtq_n_s16 (int16x8_t __a
, int16_t __b
)
5031 return __builtin_mve_vcmpgtq_n_v8hi (__a
, __b
);
5034 __extension__
extern __inline mve_pred16_t
5035 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5036 __arm_vcmpgeq_s16 (int16x8_t __a
, int16x8_t __b
)
5038 return __builtin_mve_vcmpgeq_v8hi (__a
, __b
);
5041 __extension__
extern __inline mve_pred16_t
5042 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5043 __arm_vcmpgeq_n_s16 (int16x8_t __a
, int16_t __b
)
5045 return __builtin_mve_vcmpgeq_n_v8hi (__a
, __b
);
5048 __extension__
extern __inline mve_pred16_t
5049 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5050 __arm_vcmpeqq_s16 (int16x8_t __a
, int16x8_t __b
)
5052 return __builtin_mve_vcmpeqq_v8hi (__a
, __b
);
5055 __extension__
extern __inline mve_pred16_t
5056 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5057 __arm_vcmpeqq_n_s16 (int16x8_t __a
, int16_t __b
)
5059 return __builtin_mve_vcmpeqq_n_v8hi (__a
, __b
);
5062 __extension__
extern __inline uint16x8_t
5063 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5064 __arm_vqshluq_n_s16 (int16x8_t __a
, const int __imm
)
5066 return __builtin_mve_vqshluq_n_sv8hi (__a
, __imm
);
5069 __extension__
extern __inline
int32_t
5070 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5071 __arm_vaddvq_p_s16 (int16x8_t __a
, mve_pred16_t __p
)
5073 return __builtin_mve_vaddvq_p_sv8hi (__a
, __p
);
5076 __extension__
extern __inline int16x8_t
5077 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5078 __arm_vsubq_s16 (int16x8_t __a
, int16x8_t __b
)
5080 return __builtin_mve_vsubq_sv8hi (__a
, __b
);
5083 __extension__
extern __inline int16x8_t
5084 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5085 __arm_vsubq_n_s16 (int16x8_t __a
, int16_t __b
)
5087 return __builtin_mve_vsubq_n_sv8hi (__a
, __b
);
5090 __extension__
extern __inline int16x8_t
5091 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5092 __arm_vshlq_r_s16 (int16x8_t __a
, int32_t __b
)
5094 return __builtin_mve_vshlq_r_sv8hi (__a
, __b
);
5097 __extension__
extern __inline int16x8_t
5098 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5099 __arm_vrshlq_s16 (int16x8_t __a
, int16x8_t __b
)
5101 return __builtin_mve_vrshlq_sv8hi (__a
, __b
);
5104 __extension__
extern __inline int16x8_t
5105 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5106 __arm_vrshlq_n_s16 (int16x8_t __a
, int32_t __b
)
5108 return __builtin_mve_vrshlq_n_sv8hi (__a
, __b
);
5111 __extension__
extern __inline int16x8_t
5112 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5113 __arm_vrmulhq_s16 (int16x8_t __a
, int16x8_t __b
)
5115 return __builtin_mve_vrmulhq_sv8hi (__a
, __b
);
5118 __extension__
extern __inline int16x8_t
5119 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5120 __arm_vrhaddq_s16 (int16x8_t __a
, int16x8_t __b
)
5122 return __builtin_mve_vrhaddq_sv8hi (__a
, __b
);
5125 __extension__
extern __inline int16x8_t
5126 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5127 __arm_vqsubq_s16 (int16x8_t __a
, int16x8_t __b
)
5129 return __builtin_mve_vqsubq_sv8hi (__a
, __b
);
5132 __extension__
extern __inline int16x8_t
5133 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5134 __arm_vqsubq_n_s16 (int16x8_t __a
, int16_t __b
)
5136 return __builtin_mve_vqsubq_n_sv8hi (__a
, __b
);
5139 __extension__
extern __inline int16x8_t
5140 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5141 __arm_vqshlq_s16 (int16x8_t __a
, int16x8_t __b
)
5143 return __builtin_mve_vqshlq_sv8hi (__a
, __b
);
5146 __extension__
extern __inline int16x8_t
5147 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5148 __arm_vqshlq_r_s16 (int16x8_t __a
, int32_t __b
)
5150 return __builtin_mve_vqshlq_r_sv8hi (__a
, __b
);
5153 __extension__
extern __inline int16x8_t
5154 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5155 __arm_vqrshlq_s16 (int16x8_t __a
, int16x8_t __b
)
5157 return __builtin_mve_vqrshlq_sv8hi (__a
, __b
);
5160 __extension__
extern __inline int16x8_t
5161 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5162 __arm_vqrshlq_n_s16 (int16x8_t __a
, int32_t __b
)
5164 return __builtin_mve_vqrshlq_n_sv8hi (__a
, __b
);
5167 __extension__
extern __inline int16x8_t
5168 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5169 __arm_vqrdmulhq_s16 (int16x8_t __a
, int16x8_t __b
)
5171 return __builtin_mve_vqrdmulhq_sv8hi (__a
, __b
);
5174 __extension__
extern __inline int16x8_t
5175 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5176 __arm_vqrdmulhq_n_s16 (int16x8_t __a
, int16_t __b
)
5178 return __builtin_mve_vqrdmulhq_n_sv8hi (__a
, __b
);
5181 __extension__
extern __inline int16x8_t
5182 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5183 __arm_vqdmulhq_s16 (int16x8_t __a
, int16x8_t __b
)
5185 return __builtin_mve_vqdmulhq_sv8hi (__a
, __b
);
5188 __extension__
extern __inline int16x8_t
5189 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5190 __arm_vqdmulhq_n_s16 (int16x8_t __a
, int16_t __b
)
5192 return __builtin_mve_vqdmulhq_n_sv8hi (__a
, __b
);
5195 __extension__
extern __inline int16x8_t
5196 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5197 __arm_vqaddq_s16 (int16x8_t __a
, int16x8_t __b
)
5199 return __builtin_mve_vqaddq_sv8hi (__a
, __b
);
5202 __extension__
extern __inline int16x8_t
5203 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5204 __arm_vqaddq_n_s16 (int16x8_t __a
, int16_t __b
)
5206 return __builtin_mve_vqaddq_n_sv8hi (__a
, __b
);
5209 __extension__
extern __inline int16x8_t
5210 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5211 __arm_vorrq_s16 (int16x8_t __a
, int16x8_t __b
)
5213 return __builtin_mve_vorrq_sv8hi (__a
, __b
);
5216 __extension__
extern __inline int16x8_t
5217 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5218 __arm_vornq_s16 (int16x8_t __a
, int16x8_t __b
)
5220 return __builtin_mve_vornq_sv8hi (__a
, __b
);
5223 __extension__
extern __inline int16x8_t
5224 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5225 __arm_vmulq_s16 (int16x8_t __a
, int16x8_t __b
)
5227 return __builtin_mve_vmulq_sv8hi (__a
, __b
);
5230 __extension__
extern __inline int16x8_t
5231 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5232 __arm_vmulq_n_s16 (int16x8_t __a
, int16_t __b
)
5234 return __builtin_mve_vmulq_n_sv8hi (__a
, __b
);
5237 __extension__
extern __inline int32x4_t
5238 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5239 __arm_vmulltq_int_s16 (int16x8_t __a
, int16x8_t __b
)
5241 return __builtin_mve_vmulltq_int_sv8hi (__a
, __b
);
5244 __extension__
extern __inline int32x4_t
5245 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5246 __arm_vmullbq_int_s16 (int16x8_t __a
, int16x8_t __b
)
5248 return __builtin_mve_vmullbq_int_sv8hi (__a
, __b
);
5251 __extension__
extern __inline int16x8_t
5252 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5253 __arm_vmulhq_s16 (int16x8_t __a
, int16x8_t __b
)
5255 return __builtin_mve_vmulhq_sv8hi (__a
, __b
);
5258 __extension__
extern __inline
int32_t
5259 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5260 __arm_vmlsdavxq_s16 (int16x8_t __a
, int16x8_t __b
)
5262 return __builtin_mve_vmlsdavxq_sv8hi (__a
, __b
);
5265 __extension__
extern __inline
int32_t
5266 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5267 __arm_vmlsdavq_s16 (int16x8_t __a
, int16x8_t __b
)
5269 return __builtin_mve_vmlsdavq_sv8hi (__a
, __b
);
5272 __extension__
extern __inline
int32_t
5273 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5274 __arm_vmladavxq_s16 (int16x8_t __a
, int16x8_t __b
)
5276 return __builtin_mve_vmladavxq_sv8hi (__a
, __b
);
5279 __extension__
extern __inline
int32_t
5280 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5281 __arm_vmladavq_s16 (int16x8_t __a
, int16x8_t __b
)
5283 return __builtin_mve_vmladavq_sv8hi (__a
, __b
);
5286 __extension__
extern __inline
int16_t
5287 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5288 __arm_vminvq_s16 (int16_t __a
, int16x8_t __b
)
5290 return __builtin_mve_vminvq_sv8hi (__a
, __b
);
5293 __extension__
extern __inline int16x8_t
5294 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5295 __arm_vminq_s16 (int16x8_t __a
, int16x8_t __b
)
5297 return __builtin_mve_vminq_sv8hi (__a
, __b
);
5300 __extension__
extern __inline
int16_t
5301 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5302 __arm_vmaxvq_s16 (int16_t __a
, int16x8_t __b
)
5304 return __builtin_mve_vmaxvq_sv8hi (__a
, __b
);
5307 __extension__
extern __inline int16x8_t
5308 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5309 __arm_vmaxq_s16 (int16x8_t __a
, int16x8_t __b
)
5311 return __builtin_mve_vmaxq_sv8hi (__a
, __b
);
5314 __extension__
extern __inline int16x8_t
5315 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5316 __arm_vhsubq_s16 (int16x8_t __a
, int16x8_t __b
)
5318 return __builtin_mve_vhsubq_sv8hi (__a
, __b
);
5321 __extension__
extern __inline int16x8_t
5322 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5323 __arm_vhsubq_n_s16 (int16x8_t __a
, int16_t __b
)
5325 return __builtin_mve_vhsubq_n_sv8hi (__a
, __b
);
5328 __extension__
extern __inline int16x8_t
5329 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5330 __arm_vhcaddq_rot90_s16 (int16x8_t __a
, int16x8_t __b
)
5332 return __builtin_mve_vhcaddq_rot90_sv8hi (__a
, __b
);
5335 __extension__
extern __inline int16x8_t
5336 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5337 __arm_vhcaddq_rot270_s16 (int16x8_t __a
, int16x8_t __b
)
5339 return __builtin_mve_vhcaddq_rot270_sv8hi (__a
, __b
);
5342 __extension__
extern __inline int16x8_t
5343 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5344 __arm_vhaddq_s16 (int16x8_t __a
, int16x8_t __b
)
5346 return __builtin_mve_vhaddq_sv8hi (__a
, __b
);
5349 __extension__
extern __inline int16x8_t
5350 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5351 __arm_vhaddq_n_s16 (int16x8_t __a
, int16_t __b
)
5353 return __builtin_mve_vhaddq_n_sv8hi (__a
, __b
);
5356 __extension__
extern __inline int16x8_t
5357 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5358 __arm_veorq_s16 (int16x8_t __a
, int16x8_t __b
)
5360 return __builtin_mve_veorq_sv8hi (__a
, __b
);
5363 __extension__
extern __inline int16x8_t
5364 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5365 __arm_vcaddq_rot90_s16 (int16x8_t __a
, int16x8_t __b
)
5367 return __builtin_mve_vcaddq_rot90v8hi (__a
, __b
);
5370 __extension__
extern __inline int16x8_t
5371 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5372 __arm_vcaddq_rot270_s16 (int16x8_t __a
, int16x8_t __b
)
5374 return __builtin_mve_vcaddq_rot270v8hi (__a
, __b
);
5377 __extension__
extern __inline int16x8_t
5378 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5379 __arm_vbrsrq_n_s16 (int16x8_t __a
, int32_t __b
)
5381 return __builtin_mve_vbrsrq_n_sv8hi (__a
, __b
);
5384 __extension__
extern __inline int16x8_t
5385 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5386 __arm_vbicq_s16 (int16x8_t __a
, int16x8_t __b
)
5388 return __builtin_mve_vbicq_sv8hi (__a
, __b
);
5391 __extension__
extern __inline int16x8_t
5392 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5393 __arm_vandq_s16 (int16x8_t __a
, int16x8_t __b
)
5395 return __builtin_mve_vandq_sv8hi (__a
, __b
);
5398 __extension__
extern __inline
int32_t
5399 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5400 __arm_vaddvaq_s16 (int32_t __a
, int16x8_t __b
)
5402 return __builtin_mve_vaddvaq_sv8hi (__a
, __b
);
5405 __extension__
extern __inline int16x8_t
5406 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5407 __arm_vaddq_n_s16 (int16x8_t __a
, int16_t __b
)
5409 return __builtin_mve_vaddq_n_sv8hi (__a
, __b
);
5412 __extension__
extern __inline int16x8_t
5413 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5414 __arm_vabdq_s16 (int16x8_t __a
, int16x8_t __b
)
5416 return __builtin_mve_vabdq_sv8hi (__a
, __b
);
5419 __extension__
extern __inline int16x8_t
5420 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5421 __arm_vshlq_n_s16 (int16x8_t __a
, const int __imm
)
5423 return __builtin_mve_vshlq_n_sv8hi (__a
, __imm
);
5426 __extension__
extern __inline int16x8_t
5427 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5428 __arm_vrshrq_n_s16 (int16x8_t __a
, const int __imm
)
5430 return __builtin_mve_vrshrq_n_sv8hi (__a
, __imm
);
5433 __extension__
extern __inline int16x8_t
5434 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5435 __arm_vqshlq_n_s16 (int16x8_t __a
, const int __imm
)
5437 return __builtin_mve_vqshlq_n_sv8hi (__a
, __imm
);
5440 __extension__
extern __inline uint32x4_t
5441 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5442 __arm_vsubq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5444 return __builtin_mve_vsubq_uv4si (__a
, __b
);
5447 __extension__
extern __inline uint32x4_t
5448 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5449 __arm_vsubq_n_u32 (uint32x4_t __a
, uint32_t __b
)
5451 return __builtin_mve_vsubq_n_uv4si (__a
, __b
);
5454 __extension__
extern __inline uint32x4_t
5455 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5456 __arm_vrmulhq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5458 return __builtin_mve_vrmulhq_uv4si (__a
, __b
);
5461 __extension__
extern __inline uint32x4_t
5462 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5463 __arm_vrhaddq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5465 return __builtin_mve_vrhaddq_uv4si (__a
, __b
);
5468 __extension__
extern __inline uint32x4_t
5469 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5470 __arm_vqsubq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5472 return __builtin_mve_vqsubq_uv4si (__a
, __b
);
5475 __extension__
extern __inline uint32x4_t
5476 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5477 __arm_vqsubq_n_u32 (uint32x4_t __a
, uint32_t __b
)
5479 return __builtin_mve_vqsubq_n_uv4si (__a
, __b
);
5482 __extension__
extern __inline uint32x4_t
5483 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5484 __arm_vqaddq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5486 return __builtin_mve_vqaddq_uv4si (__a
, __b
);
5489 __extension__
extern __inline uint32x4_t
5490 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5491 __arm_vqaddq_n_u32 (uint32x4_t __a
, uint32_t __b
)
5493 return __builtin_mve_vqaddq_n_uv4si (__a
, __b
);
5496 __extension__
extern __inline uint32x4_t
5497 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5498 __arm_vorrq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5500 return __builtin_mve_vorrq_uv4si (__a
, __b
);
5503 __extension__
extern __inline uint32x4_t
5504 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5505 __arm_vornq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5507 return __builtin_mve_vornq_uv4si (__a
, __b
);
5510 __extension__
extern __inline uint32x4_t
5511 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5512 __arm_vmulq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5514 return __builtin_mve_vmulq_uv4si (__a
, __b
);
5517 __extension__
extern __inline uint32x4_t
5518 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5519 __arm_vmulq_n_u32 (uint32x4_t __a
, uint32_t __b
)
5521 return __builtin_mve_vmulq_n_uv4si (__a
, __b
);
5524 __extension__
extern __inline uint64x2_t
5525 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5526 __arm_vmulltq_int_u32 (uint32x4_t __a
, uint32x4_t __b
)
5528 return __builtin_mve_vmulltq_int_uv4si (__a
, __b
);
5531 __extension__
extern __inline uint64x2_t
5532 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5533 __arm_vmullbq_int_u32 (uint32x4_t __a
, uint32x4_t __b
)
5535 return __builtin_mve_vmullbq_int_uv4si (__a
, __b
);
5538 __extension__
extern __inline uint32x4_t
5539 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5540 __arm_vmulhq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5542 return __builtin_mve_vmulhq_uv4si (__a
, __b
);
5545 __extension__
extern __inline
uint32_t
5546 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5547 __arm_vmladavq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5549 return __builtin_mve_vmladavq_uv4si (__a
, __b
);
5552 __extension__
extern __inline
uint32_t
5553 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5554 __arm_vminvq_u32 (uint32_t __a
, uint32x4_t __b
)
5556 return __builtin_mve_vminvq_uv4si (__a
, __b
);
5559 __extension__
extern __inline uint32x4_t
5560 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5561 __arm_vminq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5563 return __builtin_mve_vminq_uv4si (__a
, __b
);
5566 __extension__
extern __inline
uint32_t
5567 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5568 __arm_vmaxvq_u32 (uint32_t __a
, uint32x4_t __b
)
5570 return __builtin_mve_vmaxvq_uv4si (__a
, __b
);
5573 __extension__
extern __inline uint32x4_t
5574 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5575 __arm_vmaxq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5577 return __builtin_mve_vmaxq_uv4si (__a
, __b
);
5580 __extension__
extern __inline uint32x4_t
5581 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5582 __arm_vhsubq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5584 return __builtin_mve_vhsubq_uv4si (__a
, __b
);
5587 __extension__
extern __inline uint32x4_t
5588 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5589 __arm_vhsubq_n_u32 (uint32x4_t __a
, uint32_t __b
)
5591 return __builtin_mve_vhsubq_n_uv4si (__a
, __b
);
5594 __extension__
extern __inline uint32x4_t
5595 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5596 __arm_vhaddq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5598 return __builtin_mve_vhaddq_uv4si (__a
, __b
);
5601 __extension__
extern __inline uint32x4_t
5602 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5603 __arm_vhaddq_n_u32 (uint32x4_t __a
, uint32_t __b
)
5605 return __builtin_mve_vhaddq_n_uv4si (__a
, __b
);
5608 __extension__
extern __inline uint32x4_t
5609 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5610 __arm_veorq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5612 return __builtin_mve_veorq_uv4si (__a
, __b
);
5615 __extension__
extern __inline mve_pred16_t
5616 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5617 __arm_vcmpneq_n_u32 (uint32x4_t __a
, uint32_t __b
)
5619 return __builtin_mve_vcmpneq_n_v4si ((int32x4_t
)__a
, (int32_t)__b
);
5622 __extension__
extern __inline mve_pred16_t
5623 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5624 __arm_vcmphiq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5626 return __builtin_mve_vcmphiq_v4si (__a
, __b
);
5629 __extension__
extern __inline mve_pred16_t
5630 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5631 __arm_vcmphiq_n_u32 (uint32x4_t __a
, uint32_t __b
)
5633 return __builtin_mve_vcmphiq_n_v4si (__a
, __b
);
5636 __extension__
extern __inline mve_pred16_t
5637 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5638 __arm_vcmpeqq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5640 return __builtin_mve_vcmpeqq_v4si ((int32x4_t
)__a
, (int32x4_t
)__b
);
5643 __extension__
extern __inline mve_pred16_t
5644 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5645 __arm_vcmpeqq_n_u32 (uint32x4_t __a
, uint32_t __b
)
5647 return __builtin_mve_vcmpeqq_n_v4si ((int32x4_t
)__a
, (int32_t)__b
);
5650 __extension__
extern __inline mve_pred16_t
5651 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5652 __arm_vcmpcsq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5654 return __builtin_mve_vcmpcsq_v4si (__a
, __b
);
5657 __extension__
extern __inline mve_pred16_t
5658 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5659 __arm_vcmpcsq_n_u32 (uint32x4_t __a
, uint32_t __b
)
5661 return __builtin_mve_vcmpcsq_n_v4si (__a
, __b
);
5664 __extension__
extern __inline uint32x4_t
5665 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5666 __arm_vcaddq_rot90_u32 (uint32x4_t __a
, uint32x4_t __b
)
5669 __builtin_mve_vcaddq_rot90v4si ((int32x4_t
)__a
, (int32x4_t
)__b
);
5672 __extension__
extern __inline uint32x4_t
5673 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5674 __arm_vcaddq_rot270_u32 (uint32x4_t __a
, uint32x4_t __b
)
5677 __builtin_mve_vcaddq_rot270v4si ((int32x4_t
)__a
, (int32x4_t
)__b
);
5680 __extension__
extern __inline uint32x4_t
5681 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5682 __arm_vbicq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5684 return __builtin_mve_vbicq_uv4si (__a
, __b
);
5687 __extension__
extern __inline uint32x4_t
5688 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5689 __arm_vandq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5691 return __builtin_mve_vandq_uv4si (__a
, __b
);
5694 __extension__
extern __inline
uint32_t
5695 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5696 __arm_vaddvq_p_u32 (uint32x4_t __a
, mve_pred16_t __p
)
5698 return __builtin_mve_vaddvq_p_uv4si (__a
, __p
);
5701 __extension__
extern __inline
uint32_t
5702 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5703 __arm_vaddvaq_u32 (uint32_t __a
, uint32x4_t __b
)
5705 return __builtin_mve_vaddvaq_uv4si (__a
, __b
);
5708 __extension__
extern __inline uint32x4_t
5709 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5710 __arm_vaddq_n_u32 (uint32x4_t __a
, uint32_t __b
)
5712 return __builtin_mve_vaddq_n_uv4si (__a
, __b
);
5715 __extension__
extern __inline uint32x4_t
5716 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5717 __arm_vabdq_u32 (uint32x4_t __a
, uint32x4_t __b
)
5719 return __builtin_mve_vabdq_uv4si (__a
, __b
);
5722 __extension__
extern __inline uint32x4_t
5723 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5724 __arm_vshlq_r_u32 (uint32x4_t __a
, int32_t __b
)
5726 return __builtin_mve_vshlq_r_uv4si (__a
, __b
);
5729 __extension__
extern __inline uint32x4_t
5730 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5731 __arm_vrshlq_u32 (uint32x4_t __a
, int32x4_t __b
)
5733 return __builtin_mve_vrshlq_uv4si (__a
, __b
);
5736 __extension__
extern __inline uint32x4_t
5737 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5738 __arm_vrshlq_n_u32 (uint32x4_t __a
, int32_t __b
)
5740 return __builtin_mve_vrshlq_n_uv4si (__a
, __b
);
5743 __extension__
extern __inline uint32x4_t
5744 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5745 __arm_vqshlq_u32 (uint32x4_t __a
, int32x4_t __b
)
5747 return __builtin_mve_vqshlq_uv4si (__a
, __b
);
5750 __extension__
extern __inline uint32x4_t
5751 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5752 __arm_vqshlq_r_u32 (uint32x4_t __a
, int32_t __b
)
5754 return __builtin_mve_vqshlq_r_uv4si (__a
, __b
);
5757 __extension__
extern __inline uint32x4_t
5758 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5759 __arm_vqrshlq_u32 (uint32x4_t __a
, int32x4_t __b
)
5761 return __builtin_mve_vqrshlq_uv4si (__a
, __b
);
5764 __extension__
extern __inline uint32x4_t
5765 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5766 __arm_vqrshlq_n_u32 (uint32x4_t __a
, int32_t __b
)
5768 return __builtin_mve_vqrshlq_n_uv4si (__a
, __b
);
5771 __extension__
extern __inline
uint32_t
5772 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5773 __arm_vminavq_s32 (uint32_t __a
, int32x4_t __b
)
5775 return __builtin_mve_vminavq_sv4si (__a
, __b
);
5778 __extension__
extern __inline uint32x4_t
5779 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5780 __arm_vminaq_s32 (uint32x4_t __a
, int32x4_t __b
)
5782 return __builtin_mve_vminaq_sv4si (__a
, __b
);
5785 __extension__
extern __inline
uint32_t
5786 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5787 __arm_vmaxavq_s32 (uint32_t __a
, int32x4_t __b
)
5789 return __builtin_mve_vmaxavq_sv4si (__a
, __b
);
5792 __extension__
extern __inline uint32x4_t
5793 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5794 __arm_vmaxaq_s32 (uint32x4_t __a
, int32x4_t __b
)
5796 return __builtin_mve_vmaxaq_sv4si (__a
, __b
);
5799 __extension__
extern __inline uint32x4_t
5800 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5801 __arm_vbrsrq_n_u32 (uint32x4_t __a
, int32_t __b
)
5803 return __builtin_mve_vbrsrq_n_uv4si (__a
, __b
);
5806 __extension__
extern __inline uint32x4_t
5807 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5808 __arm_vshlq_n_u32 (uint32x4_t __a
, const int __imm
)
5810 return __builtin_mve_vshlq_n_uv4si (__a
, __imm
);
5813 __extension__
extern __inline uint32x4_t
5814 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5815 __arm_vrshrq_n_u32 (uint32x4_t __a
, const int __imm
)
5817 return __builtin_mve_vrshrq_n_uv4si (__a
, __imm
);
5820 __extension__
extern __inline uint32x4_t
5821 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5822 __arm_vqshlq_n_u32 (uint32x4_t __a
, const int __imm
)
5824 return __builtin_mve_vqshlq_n_uv4si (__a
, __imm
);
5827 __extension__
extern __inline mve_pred16_t
5828 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5829 __arm_vcmpneq_n_s32 (int32x4_t __a
, int32_t __b
)
5831 return __builtin_mve_vcmpneq_n_v4si (__a
, __b
);
5834 __extension__
extern __inline mve_pred16_t
5835 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5836 __arm_vcmpltq_s32 (int32x4_t __a
, int32x4_t __b
)
5838 return __builtin_mve_vcmpltq_v4si (__a
, __b
);
5841 __extension__
extern __inline mve_pred16_t
5842 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5843 __arm_vcmpltq_n_s32 (int32x4_t __a
, int32_t __b
)
5845 return __builtin_mve_vcmpltq_n_v4si (__a
, __b
);
5848 __extension__
extern __inline mve_pred16_t
5849 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5850 __arm_vcmpleq_s32 (int32x4_t __a
, int32x4_t __b
)
5852 return __builtin_mve_vcmpleq_v4si (__a
, __b
);
5855 __extension__
extern __inline mve_pred16_t
5856 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5857 __arm_vcmpleq_n_s32 (int32x4_t __a
, int32_t __b
)
5859 return __builtin_mve_vcmpleq_n_v4si (__a
, __b
);
5862 __extension__
extern __inline mve_pred16_t
5863 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5864 __arm_vcmpgtq_s32 (int32x4_t __a
, int32x4_t __b
)
5866 return __builtin_mve_vcmpgtq_v4si (__a
, __b
);
5869 __extension__
extern __inline mve_pred16_t
5870 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5871 __arm_vcmpgtq_n_s32 (int32x4_t __a
, int32_t __b
)
5873 return __builtin_mve_vcmpgtq_n_v4si (__a
, __b
);
5876 __extension__
extern __inline mve_pred16_t
5877 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5878 __arm_vcmpgeq_s32 (int32x4_t __a
, int32x4_t __b
)
5880 return __builtin_mve_vcmpgeq_v4si (__a
, __b
);
5883 __extension__
extern __inline mve_pred16_t
5884 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5885 __arm_vcmpgeq_n_s32 (int32x4_t __a
, int32_t __b
)
5887 return __builtin_mve_vcmpgeq_n_v4si (__a
, __b
);
5890 __extension__
extern __inline mve_pred16_t
5891 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5892 __arm_vcmpeqq_s32 (int32x4_t __a
, int32x4_t __b
)
5894 return __builtin_mve_vcmpeqq_v4si (__a
, __b
);
5897 __extension__
extern __inline mve_pred16_t
5898 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5899 __arm_vcmpeqq_n_s32 (int32x4_t __a
, int32_t __b
)
5901 return __builtin_mve_vcmpeqq_n_v4si (__a
, __b
);
5904 __extension__
extern __inline uint32x4_t
5905 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5906 __arm_vqshluq_n_s32 (int32x4_t __a
, const int __imm
)
5908 return __builtin_mve_vqshluq_n_sv4si (__a
, __imm
);
5911 __extension__
extern __inline
int32_t
5912 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5913 __arm_vaddvq_p_s32 (int32x4_t __a
, mve_pred16_t __p
)
5915 return __builtin_mve_vaddvq_p_sv4si (__a
, __p
);
5918 __extension__
extern __inline int32x4_t
5919 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5920 __arm_vsubq_s32 (int32x4_t __a
, int32x4_t __b
)
5922 return __builtin_mve_vsubq_sv4si (__a
, __b
);
5925 __extension__
extern __inline int32x4_t
5926 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5927 __arm_vsubq_n_s32 (int32x4_t __a
, int32_t __b
)
5929 return __builtin_mve_vsubq_n_sv4si (__a
, __b
);
5932 __extension__
extern __inline int32x4_t
5933 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5934 __arm_vshlq_r_s32 (int32x4_t __a
, int32_t __b
)
5936 return __builtin_mve_vshlq_r_sv4si (__a
, __b
);
5939 __extension__
extern __inline int32x4_t
5940 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5941 __arm_vrshlq_s32 (int32x4_t __a
, int32x4_t __b
)
5943 return __builtin_mve_vrshlq_sv4si (__a
, __b
);
5946 __extension__
extern __inline int32x4_t
5947 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5948 __arm_vrshlq_n_s32 (int32x4_t __a
, int32_t __b
)
5950 return __builtin_mve_vrshlq_n_sv4si (__a
, __b
);
5953 __extension__
extern __inline int32x4_t
5954 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5955 __arm_vrmulhq_s32 (int32x4_t __a
, int32x4_t __b
)
5957 return __builtin_mve_vrmulhq_sv4si (__a
, __b
);
5960 __extension__
extern __inline int32x4_t
5961 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5962 __arm_vrhaddq_s32 (int32x4_t __a
, int32x4_t __b
)
5964 return __builtin_mve_vrhaddq_sv4si (__a
, __b
);
5967 __extension__
extern __inline int32x4_t
5968 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5969 __arm_vqsubq_s32 (int32x4_t __a
, int32x4_t __b
)
5971 return __builtin_mve_vqsubq_sv4si (__a
, __b
);
5974 __extension__
extern __inline int32x4_t
5975 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5976 __arm_vqsubq_n_s32 (int32x4_t __a
, int32_t __b
)
5978 return __builtin_mve_vqsubq_n_sv4si (__a
, __b
);
5981 __extension__
extern __inline int32x4_t
5982 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5983 __arm_vqshlq_s32 (int32x4_t __a
, int32x4_t __b
)
5985 return __builtin_mve_vqshlq_sv4si (__a
, __b
);
5988 __extension__
extern __inline int32x4_t
5989 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5990 __arm_vqshlq_r_s32 (int32x4_t __a
, int32_t __b
)
5992 return __builtin_mve_vqshlq_r_sv4si (__a
, __b
);
5995 __extension__
extern __inline int32x4_t
5996 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
5997 __arm_vqrshlq_s32 (int32x4_t __a
, int32x4_t __b
)
5999 return __builtin_mve_vqrshlq_sv4si (__a
, __b
);
6002 __extension__
extern __inline int32x4_t
6003 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6004 __arm_vqrshlq_n_s32 (int32x4_t __a
, int32_t __b
)
6006 return __builtin_mve_vqrshlq_n_sv4si (__a
, __b
);
6009 __extension__
extern __inline int32x4_t
6010 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6011 __arm_vqrdmulhq_s32 (int32x4_t __a
, int32x4_t __b
)
6013 return __builtin_mve_vqrdmulhq_sv4si (__a
, __b
);
6016 __extension__
extern __inline int32x4_t
6017 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6018 __arm_vqrdmulhq_n_s32 (int32x4_t __a
, int32_t __b
)
6020 return __builtin_mve_vqrdmulhq_n_sv4si (__a
, __b
);
6023 __extension__
extern __inline int32x4_t
6024 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6025 __arm_vqdmulhq_s32 (int32x4_t __a
, int32x4_t __b
)
6027 return __builtin_mve_vqdmulhq_sv4si (__a
, __b
);
6030 __extension__
extern __inline int32x4_t
6031 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6032 __arm_vqdmulhq_n_s32 (int32x4_t __a
, int32_t __b
)
6034 return __builtin_mve_vqdmulhq_n_sv4si (__a
, __b
);
6037 __extension__
extern __inline int32x4_t
6038 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6039 __arm_vqaddq_s32 (int32x4_t __a
, int32x4_t __b
)
6041 return __builtin_mve_vqaddq_sv4si (__a
, __b
);
6044 __extension__
extern __inline int32x4_t
6045 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6046 __arm_vqaddq_n_s32 (int32x4_t __a
, int32_t __b
)
6048 return __builtin_mve_vqaddq_n_sv4si (__a
, __b
);
6051 __extension__
extern __inline int32x4_t
6052 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6053 __arm_vorrq_s32 (int32x4_t __a
, int32x4_t __b
)
6055 return __builtin_mve_vorrq_sv4si (__a
, __b
);
6058 __extension__
extern __inline int32x4_t
6059 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6060 __arm_vornq_s32 (int32x4_t __a
, int32x4_t __b
)
6062 return __builtin_mve_vornq_sv4si (__a
, __b
);
6065 __extension__
extern __inline int32x4_t
6066 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6067 __arm_vmulq_s32 (int32x4_t __a
, int32x4_t __b
)
6069 return __builtin_mve_vmulq_sv4si (__a
, __b
);
6072 __extension__
extern __inline int32x4_t
6073 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6074 __arm_vmulq_n_s32 (int32x4_t __a
, int32_t __b
)
6076 return __builtin_mve_vmulq_n_sv4si (__a
, __b
);
6079 __extension__
extern __inline int64x2_t
6080 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6081 __arm_vmulltq_int_s32 (int32x4_t __a
, int32x4_t __b
)
6083 return __builtin_mve_vmulltq_int_sv4si (__a
, __b
);
6086 __extension__
extern __inline int64x2_t
6087 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6088 __arm_vmullbq_int_s32 (int32x4_t __a
, int32x4_t __b
)
6090 return __builtin_mve_vmullbq_int_sv4si (__a
, __b
);
6093 __extension__
extern __inline int32x4_t
6094 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6095 __arm_vmulhq_s32 (int32x4_t __a
, int32x4_t __b
)
6097 return __builtin_mve_vmulhq_sv4si (__a
, __b
);
6100 __extension__
extern __inline
int32_t
6101 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6102 __arm_vmlsdavxq_s32 (int32x4_t __a
, int32x4_t __b
)
6104 return __builtin_mve_vmlsdavxq_sv4si (__a
, __b
);
6107 __extension__
extern __inline
int32_t
6108 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6109 __arm_vmlsdavq_s32 (int32x4_t __a
, int32x4_t __b
)
6111 return __builtin_mve_vmlsdavq_sv4si (__a
, __b
);
6114 __extension__
extern __inline
int32_t
6115 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6116 __arm_vmladavxq_s32 (int32x4_t __a
, int32x4_t __b
)
6118 return __builtin_mve_vmladavxq_sv4si (__a
, __b
);
6121 __extension__
extern __inline
int32_t
6122 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6123 __arm_vmladavq_s32 (int32x4_t __a
, int32x4_t __b
)
6125 return __builtin_mve_vmladavq_sv4si (__a
, __b
);
6128 __extension__
extern __inline
int32_t
6129 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6130 __arm_vminvq_s32 (int32_t __a
, int32x4_t __b
)
6132 return __builtin_mve_vminvq_sv4si (__a
, __b
);
6135 __extension__
extern __inline int32x4_t
6136 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6137 __arm_vminq_s32 (int32x4_t __a
, int32x4_t __b
)
6139 return __builtin_mve_vminq_sv4si (__a
, __b
);
6142 __extension__
extern __inline
int32_t
6143 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6144 __arm_vmaxvq_s32 (int32_t __a
, int32x4_t __b
)
6146 return __builtin_mve_vmaxvq_sv4si (__a
, __b
);
6149 __extension__
extern __inline int32x4_t
6150 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6151 __arm_vmaxq_s32 (int32x4_t __a
, int32x4_t __b
)
6153 return __builtin_mve_vmaxq_sv4si (__a
, __b
);
6156 __extension__
extern __inline int32x4_t
6157 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6158 __arm_vhsubq_s32 (int32x4_t __a
, int32x4_t __b
)
6160 return __builtin_mve_vhsubq_sv4si (__a
, __b
);
6163 __extension__
extern __inline int32x4_t
6164 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6165 __arm_vhsubq_n_s32 (int32x4_t __a
, int32_t __b
)
6167 return __builtin_mve_vhsubq_n_sv4si (__a
, __b
);
6170 __extension__
extern __inline int32x4_t
6171 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6172 __arm_vhcaddq_rot90_s32 (int32x4_t __a
, int32x4_t __b
)
6174 return __builtin_mve_vhcaddq_rot90_sv4si (__a
, __b
);
6177 __extension__
extern __inline int32x4_t
6178 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6179 __arm_vhcaddq_rot270_s32 (int32x4_t __a
, int32x4_t __b
)
6181 return __builtin_mve_vhcaddq_rot270_sv4si (__a
, __b
);
6184 __extension__
extern __inline int32x4_t
6185 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6186 __arm_vhaddq_s32 (int32x4_t __a
, int32x4_t __b
)
6188 return __builtin_mve_vhaddq_sv4si (__a
, __b
);
6191 __extension__
extern __inline int32x4_t
6192 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6193 __arm_vhaddq_n_s32 (int32x4_t __a
, int32_t __b
)
6195 return __builtin_mve_vhaddq_n_sv4si (__a
, __b
);
6198 __extension__
extern __inline int32x4_t
6199 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6200 __arm_veorq_s32 (int32x4_t __a
, int32x4_t __b
)
6202 return __builtin_mve_veorq_sv4si (__a
, __b
);
6205 __extension__
extern __inline int32x4_t
6206 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6207 __arm_vcaddq_rot90_s32 (int32x4_t __a
, int32x4_t __b
)
6209 return __builtin_mve_vcaddq_rot90v4si (__a
, __b
);
6212 __extension__
extern __inline int32x4_t
6213 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6214 __arm_vcaddq_rot270_s32 (int32x4_t __a
, int32x4_t __b
)
6216 return __builtin_mve_vcaddq_rot270v4si (__a
, __b
);
6219 __extension__
extern __inline int32x4_t
6220 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6221 __arm_vbrsrq_n_s32 (int32x4_t __a
, int32_t __b
)
6223 return __builtin_mve_vbrsrq_n_sv4si (__a
, __b
);
6226 __extension__
extern __inline int32x4_t
6227 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6228 __arm_vbicq_s32 (int32x4_t __a
, int32x4_t __b
)
6230 return __builtin_mve_vbicq_sv4si (__a
, __b
);
6233 __extension__
extern __inline int32x4_t
6234 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6235 __arm_vandq_s32 (int32x4_t __a
, int32x4_t __b
)
6237 return __builtin_mve_vandq_sv4si (__a
, __b
);
6240 __extension__
extern __inline
int32_t
6241 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6242 __arm_vaddvaq_s32 (int32_t __a
, int32x4_t __b
)
6244 return __builtin_mve_vaddvaq_sv4si (__a
, __b
);
6247 __extension__
extern __inline int32x4_t
6248 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6249 __arm_vaddq_n_s32 (int32x4_t __a
, int32_t __b
)
6251 return __builtin_mve_vaddq_n_sv4si (__a
, __b
);
6254 __extension__
extern __inline int32x4_t
6255 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6256 __arm_vabdq_s32 (int32x4_t __a
, int32x4_t __b
)
6258 return __builtin_mve_vabdq_sv4si (__a
, __b
);
6261 __extension__
extern __inline int32x4_t
6262 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6263 __arm_vshlq_n_s32 (int32x4_t __a
, const int __imm
)
6265 return __builtin_mve_vshlq_n_sv4si (__a
, __imm
);
6268 __extension__
extern __inline int32x4_t
6269 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6270 __arm_vrshrq_n_s32 (int32x4_t __a
, const int __imm
)
6272 return __builtin_mve_vrshrq_n_sv4si (__a
, __imm
);
6275 __extension__
extern __inline int32x4_t
6276 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6277 __arm_vqshlq_n_s32 (int32x4_t __a
, const int __imm
)
6279 return __builtin_mve_vqshlq_n_sv4si (__a
, __imm
);
6282 __extension__
extern __inline uint8x16_t
6283 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6284 __arm_vqmovntq_u16 (uint8x16_t __a
, uint16x8_t __b
)
6286 return __builtin_mve_vqmovntq_uv8hi (__a
, __b
);
6289 __extension__
extern __inline uint8x16_t
6290 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6291 __arm_vqmovnbq_u16 (uint8x16_t __a
, uint16x8_t __b
)
6293 return __builtin_mve_vqmovnbq_uv8hi (__a
, __b
);
6296 __extension__
extern __inline uint16x8_t
6297 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6298 __arm_vmulltq_poly_p8 (uint8x16_t __a
, uint8x16_t __b
)
6300 return __builtin_mve_vmulltq_poly_pv16qi (__a
, __b
);
6303 __extension__
extern __inline uint16x8_t
6304 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6305 __arm_vmullbq_poly_p8 (uint8x16_t __a
, uint8x16_t __b
)
6307 return __builtin_mve_vmullbq_poly_pv16qi (__a
, __b
);
6310 __extension__
extern __inline uint8x16_t
6311 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6312 __arm_vmovntq_u16 (uint8x16_t __a
, uint16x8_t __b
)
6314 return __builtin_mve_vmovntq_uv8hi (__a
, __b
);
6317 __extension__
extern __inline uint8x16_t
6318 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6319 __arm_vmovnbq_u16 (uint8x16_t __a
, uint16x8_t __b
)
6321 return __builtin_mve_vmovnbq_uv8hi (__a
, __b
);
6324 __extension__
extern __inline
uint64_t
6325 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6326 __arm_vmlaldavq_u16 (uint16x8_t __a
, uint16x8_t __b
)
6328 return __builtin_mve_vmlaldavq_uv8hi (__a
, __b
);
6331 __extension__
extern __inline uint8x16_t
6332 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6333 __arm_vqmovuntq_s16 (uint8x16_t __a
, int16x8_t __b
)
6335 return __builtin_mve_vqmovuntq_sv8hi (__a
, __b
);
6338 __extension__
extern __inline uint8x16_t
6339 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6340 __arm_vqmovunbq_s16 (uint8x16_t __a
, int16x8_t __b
)
6342 return __builtin_mve_vqmovunbq_sv8hi (__a
, __b
);
6345 __extension__
extern __inline uint16x8_t
6346 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6347 __arm_vshlltq_n_u8 (uint8x16_t __a
, const int __imm
)
6349 return __builtin_mve_vshlltq_n_uv16qi (__a
, __imm
);
6352 __extension__
extern __inline uint16x8_t
6353 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6354 __arm_vshllbq_n_u8 (uint8x16_t __a
, const int __imm
)
6356 return __builtin_mve_vshllbq_n_uv16qi (__a
, __imm
);
6359 __extension__
extern __inline uint16x8_t
6360 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6361 __arm_vorrq_n_u16 (uint16x8_t __a
, const int __imm
)
6363 return __builtin_mve_vorrq_n_uv8hi (__a
, __imm
);
6366 __extension__
extern __inline uint16x8_t
6367 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6368 __arm_vbicq_n_u16 (uint16x8_t __a
, const int __imm
)
6370 return __builtin_mve_vbicq_n_uv8hi (__a
, __imm
);
6373 __extension__
extern __inline int8x16_t
6374 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6375 __arm_vqmovntq_s16 (int8x16_t __a
, int16x8_t __b
)
6377 return __builtin_mve_vqmovntq_sv8hi (__a
, __b
);
6380 __extension__
extern __inline int8x16_t
6381 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6382 __arm_vqmovnbq_s16 (int8x16_t __a
, int16x8_t __b
)
6384 return __builtin_mve_vqmovnbq_sv8hi (__a
, __b
);
6387 __extension__
extern __inline int32x4_t
6388 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6389 __arm_vqdmulltq_s16 (int16x8_t __a
, int16x8_t __b
)
6391 return __builtin_mve_vqdmulltq_sv8hi (__a
, __b
);
6394 __extension__
extern __inline int32x4_t
6395 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6396 __arm_vqdmulltq_n_s16 (int16x8_t __a
, int16_t __b
)
6398 return __builtin_mve_vqdmulltq_n_sv8hi (__a
, __b
);
6401 __extension__
extern __inline int32x4_t
6402 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6403 __arm_vqdmullbq_s16 (int16x8_t __a
, int16x8_t __b
)
6405 return __builtin_mve_vqdmullbq_sv8hi (__a
, __b
);
6408 __extension__
extern __inline int32x4_t
6409 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6410 __arm_vqdmullbq_n_s16 (int16x8_t __a
, int16_t __b
)
6412 return __builtin_mve_vqdmullbq_n_sv8hi (__a
, __b
);
6415 __extension__
extern __inline int8x16_t
6416 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6417 __arm_vmovntq_s16 (int8x16_t __a
, int16x8_t __b
)
6419 return __builtin_mve_vmovntq_sv8hi (__a
, __b
);
6422 __extension__
extern __inline int8x16_t
6423 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6424 __arm_vmovnbq_s16 (int8x16_t __a
, int16x8_t __b
)
6426 return __builtin_mve_vmovnbq_sv8hi (__a
, __b
);
6429 __extension__
extern __inline
int64_t
6430 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6431 __arm_vmlsldavxq_s16 (int16x8_t __a
, int16x8_t __b
)
6433 return __builtin_mve_vmlsldavxq_sv8hi (__a
, __b
);
6436 __extension__
extern __inline
int64_t
6437 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6438 __arm_vmlsldavq_s16 (int16x8_t __a
, int16x8_t __b
)
6440 return __builtin_mve_vmlsldavq_sv8hi (__a
, __b
);
6443 __extension__
extern __inline
int64_t
6444 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6445 __arm_vmlaldavxq_s16 (int16x8_t __a
, int16x8_t __b
)
6447 return __builtin_mve_vmlaldavxq_sv8hi (__a
, __b
);
6450 __extension__
extern __inline
int64_t
6451 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6452 __arm_vmlaldavq_s16 (int16x8_t __a
, int16x8_t __b
)
6454 return __builtin_mve_vmlaldavq_sv8hi (__a
, __b
);
6457 __extension__
extern __inline int16x8_t
6458 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6459 __arm_vshlltq_n_s8 (int8x16_t __a
, const int __imm
)
6461 return __builtin_mve_vshlltq_n_sv16qi (__a
, __imm
);
6464 __extension__
extern __inline int16x8_t
6465 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6466 __arm_vshllbq_n_s8 (int8x16_t __a
, const int __imm
)
6468 return __builtin_mve_vshllbq_n_sv16qi (__a
, __imm
);
6471 __extension__
extern __inline int16x8_t
6472 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6473 __arm_vorrq_n_s16 (int16x8_t __a
, const int __imm
)
6475 return __builtin_mve_vorrq_n_sv8hi (__a
, __imm
);
6478 __extension__
extern __inline int16x8_t
6479 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6480 __arm_vbicq_n_s16 (int16x8_t __a
, const int __imm
)
6482 return __builtin_mve_vbicq_n_sv8hi (__a
, __imm
);
6485 __extension__
extern __inline uint16x8_t
6486 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6487 __arm_vqmovntq_u32 (uint16x8_t __a
, uint32x4_t __b
)
6489 return __builtin_mve_vqmovntq_uv4si (__a
, __b
);
6492 __extension__
extern __inline uint16x8_t
6493 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6494 __arm_vqmovnbq_u32 (uint16x8_t __a
, uint32x4_t __b
)
6496 return __builtin_mve_vqmovnbq_uv4si (__a
, __b
);
6499 __extension__
extern __inline uint32x4_t
6500 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6501 __arm_vmulltq_poly_p16 (uint16x8_t __a
, uint16x8_t __b
)
6503 return __builtin_mve_vmulltq_poly_pv8hi (__a
, __b
);
6506 __extension__
extern __inline uint32x4_t
6507 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6508 __arm_vmullbq_poly_p16 (uint16x8_t __a
, uint16x8_t __b
)
6510 return __builtin_mve_vmullbq_poly_pv8hi (__a
, __b
);
6513 __extension__
extern __inline uint16x8_t
6514 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6515 __arm_vmovntq_u32 (uint16x8_t __a
, uint32x4_t __b
)
6517 return __builtin_mve_vmovntq_uv4si (__a
, __b
);
6520 __extension__
extern __inline uint16x8_t
6521 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6522 __arm_vmovnbq_u32 (uint16x8_t __a
, uint32x4_t __b
)
6524 return __builtin_mve_vmovnbq_uv4si (__a
, __b
);
6527 __extension__
extern __inline
uint64_t
6528 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6529 __arm_vmlaldavq_u32 (uint32x4_t __a
, uint32x4_t __b
)
6531 return __builtin_mve_vmlaldavq_uv4si (__a
, __b
);
6534 __extension__
extern __inline uint16x8_t
6535 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6536 __arm_vqmovuntq_s32 (uint16x8_t __a
, int32x4_t __b
)
6538 return __builtin_mve_vqmovuntq_sv4si (__a
, __b
);
6541 __extension__
extern __inline uint16x8_t
6542 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6543 __arm_vqmovunbq_s32 (uint16x8_t __a
, int32x4_t __b
)
6545 return __builtin_mve_vqmovunbq_sv4si (__a
, __b
);
6548 __extension__
extern __inline uint32x4_t
6549 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6550 __arm_vshlltq_n_u16 (uint16x8_t __a
, const int __imm
)
6552 return __builtin_mve_vshlltq_n_uv8hi (__a
, __imm
);
6555 __extension__
extern __inline uint32x4_t
6556 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6557 __arm_vshllbq_n_u16 (uint16x8_t __a
, const int __imm
)
6559 return __builtin_mve_vshllbq_n_uv8hi (__a
, __imm
);
6562 __extension__
extern __inline uint32x4_t
6563 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6564 __arm_vorrq_n_u32 (uint32x4_t __a
, const int __imm
)
6566 return __builtin_mve_vorrq_n_uv4si (__a
, __imm
);
6569 __extension__
extern __inline uint32x4_t
6570 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6571 __arm_vbicq_n_u32 (uint32x4_t __a
, const int __imm
)
6573 return __builtin_mve_vbicq_n_uv4si (__a
, __imm
);
6576 __extension__
extern __inline int16x8_t
6577 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6578 __arm_vqmovntq_s32 (int16x8_t __a
, int32x4_t __b
)
6580 return __builtin_mve_vqmovntq_sv4si (__a
, __b
);
6583 __extension__
extern __inline int16x8_t
6584 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6585 __arm_vqmovnbq_s32 (int16x8_t __a
, int32x4_t __b
)
6587 return __builtin_mve_vqmovnbq_sv4si (__a
, __b
);
6590 __extension__
extern __inline int64x2_t
6591 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6592 __arm_vqdmulltq_s32 (int32x4_t __a
, int32x4_t __b
)
6594 return __builtin_mve_vqdmulltq_sv4si (__a
, __b
);
6597 __extension__
extern __inline int64x2_t
6598 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6599 __arm_vqdmulltq_n_s32 (int32x4_t __a
, int32_t __b
)
6601 return __builtin_mve_vqdmulltq_n_sv4si (__a
, __b
);
6604 __extension__
extern __inline int64x2_t
6605 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6606 __arm_vqdmullbq_s32 (int32x4_t __a
, int32x4_t __b
)
6608 return __builtin_mve_vqdmullbq_sv4si (__a
, __b
);
6611 __extension__
extern __inline int64x2_t
6612 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6613 __arm_vqdmullbq_n_s32 (int32x4_t __a
, int32_t __b
)
6615 return __builtin_mve_vqdmullbq_n_sv4si (__a
, __b
);
6618 __extension__
extern __inline int16x8_t
6619 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6620 __arm_vmovntq_s32 (int16x8_t __a
, int32x4_t __b
)
6622 return __builtin_mve_vmovntq_sv4si (__a
, __b
);
6625 __extension__
extern __inline int16x8_t
6626 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6627 __arm_vmovnbq_s32 (int16x8_t __a
, int32x4_t __b
)
6629 return __builtin_mve_vmovnbq_sv4si (__a
, __b
);
6632 __extension__
extern __inline
int64_t
6633 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6634 __arm_vmlsldavxq_s32 (int32x4_t __a
, int32x4_t __b
)
6636 return __builtin_mve_vmlsldavxq_sv4si (__a
, __b
);
6639 __extension__
extern __inline
int64_t
6640 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6641 __arm_vmlsldavq_s32 (int32x4_t __a
, int32x4_t __b
)
6643 return __builtin_mve_vmlsldavq_sv4si (__a
, __b
);
6646 __extension__
extern __inline
int64_t
6647 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6648 __arm_vmlaldavxq_s32 (int32x4_t __a
, int32x4_t __b
)
6650 return __builtin_mve_vmlaldavxq_sv4si (__a
, __b
);
6653 __extension__
extern __inline
int64_t
6654 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6655 __arm_vmlaldavq_s32 (int32x4_t __a
, int32x4_t __b
)
6657 return __builtin_mve_vmlaldavq_sv4si (__a
, __b
);
6660 __extension__
extern __inline int32x4_t
6661 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6662 __arm_vshlltq_n_s16 (int16x8_t __a
, const int __imm
)
6664 return __builtin_mve_vshlltq_n_sv8hi (__a
, __imm
);
6667 __extension__
extern __inline int32x4_t
6668 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6669 __arm_vshllbq_n_s16 (int16x8_t __a
, const int __imm
)
6671 return __builtin_mve_vshllbq_n_sv8hi (__a
, __imm
);
6674 __extension__
extern __inline int32x4_t
6675 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6676 __arm_vorrq_n_s32 (int32x4_t __a
, const int __imm
)
6678 return __builtin_mve_vorrq_n_sv4si (__a
, __imm
);
6681 __extension__
extern __inline int32x4_t
6682 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6683 __arm_vbicq_n_s32 (int32x4_t __a
, const int __imm
)
6685 return __builtin_mve_vbicq_n_sv4si (__a
, __imm
);
6688 __extension__
extern __inline
uint64_t
6689 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6690 __arm_vrmlaldavhq_u32 (uint32x4_t __a
, uint32x4_t __b
)
6692 return __builtin_mve_vrmlaldavhq_uv4si (__a
, __b
);
6695 __extension__
extern __inline mve_pred16_t
6696 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6697 __arm_vctp8q_m (uint32_t __a
, mve_pred16_t __p
)
6699 return __builtin_mve_vctp8q_mhi (__a
, __p
);
6702 __extension__
extern __inline mve_pred16_t
6703 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6704 __arm_vctp64q_m (uint32_t __a
, mve_pred16_t __p
)
6706 return __builtin_mve_vctp64q_mhi (__a
, __p
);
6709 __extension__
extern __inline mve_pred16_t
6710 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6711 __arm_vctp32q_m (uint32_t __a
, mve_pred16_t __p
)
6713 return __builtin_mve_vctp32q_mhi (__a
, __p
);
6716 __extension__
extern __inline mve_pred16_t
6717 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6718 __arm_vctp16q_m (uint32_t __a
, mve_pred16_t __p
)
6720 return __builtin_mve_vctp16q_mhi (__a
, __p
);
6723 __extension__
extern __inline
uint64_t
6724 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6725 __arm_vaddlvaq_u32 (uint64_t __a
, uint32x4_t __b
)
6727 return __builtin_mve_vaddlvaq_uv4si (__a
, __b
);
6730 __extension__
extern __inline
int64_t
6731 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6732 __arm_vrmlsldavhxq_s32 (int32x4_t __a
, int32x4_t __b
)
6734 return __builtin_mve_vrmlsldavhxq_sv4si (__a
, __b
);
6737 __extension__
extern __inline
int64_t
6738 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6739 __arm_vrmlsldavhq_s32 (int32x4_t __a
, int32x4_t __b
)
6741 return __builtin_mve_vrmlsldavhq_sv4si (__a
, __b
);
6744 __extension__
extern __inline
int64_t
6745 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6746 __arm_vrmlaldavhxq_s32 (int32x4_t __a
, int32x4_t __b
)
6748 return __builtin_mve_vrmlaldavhxq_sv4si (__a
, __b
);
6751 __extension__
extern __inline
int64_t
6752 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6753 __arm_vrmlaldavhq_s32 (int32x4_t __a
, int32x4_t __b
)
6755 return __builtin_mve_vrmlaldavhq_sv4si (__a
, __b
);
6758 __extension__
extern __inline
int64_t
6759 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6760 __arm_vaddlvaq_s32 (int64_t __a
, int32x4_t __b
)
6762 return __builtin_mve_vaddlvaq_sv4si (__a
, __b
);
6765 __extension__
extern __inline
uint32_t
6766 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6767 __arm_vabavq_s8 (uint32_t __a
, int8x16_t __b
, int8x16_t __c
)
6769 return __builtin_mve_vabavq_sv16qi (__a
, __b
, __c
);
6772 __extension__
extern __inline
uint32_t
6773 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6774 __arm_vabavq_s16 (uint32_t __a
, int16x8_t __b
, int16x8_t __c
)
6776 return __builtin_mve_vabavq_sv8hi (__a
, __b
, __c
);
6779 __extension__
extern __inline
uint32_t
6780 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6781 __arm_vabavq_s32 (uint32_t __a
, int32x4_t __b
, int32x4_t __c
)
6783 return __builtin_mve_vabavq_sv4si (__a
, __b
, __c
);
6786 __extension__
extern __inline
uint32_t
6787 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6788 __arm_vabavq_u8 (uint32_t __a
, uint8x16_t __b
, uint8x16_t __c
)
6790 return __builtin_mve_vabavq_uv16qi(__a
, __b
, __c
);
6793 __extension__
extern __inline
uint32_t
6794 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6795 __arm_vabavq_u16 (uint32_t __a
, uint16x8_t __b
, uint16x8_t __c
)
6797 return __builtin_mve_vabavq_uv8hi(__a
, __b
, __c
);
6800 __extension__
extern __inline
uint32_t
6801 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6802 __arm_vabavq_u32 (uint32_t __a
, uint32x4_t __b
, uint32x4_t __c
)
6804 return __builtin_mve_vabavq_uv4si(__a
, __b
, __c
);
6807 __extension__
extern __inline int16x8_t
6808 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6809 __arm_vbicq_m_n_s16 (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
6811 return __builtin_mve_vbicq_m_n_sv8hi (__a
, __imm
, __p
);
6814 __extension__
extern __inline int32x4_t
6815 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6816 __arm_vbicq_m_n_s32 (int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
6818 return __builtin_mve_vbicq_m_n_sv4si (__a
, __imm
, __p
);
6821 __extension__
extern __inline uint16x8_t
6822 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6823 __arm_vbicq_m_n_u16 (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
6825 return __builtin_mve_vbicq_m_n_uv8hi (__a
, __imm
, __p
);
6828 __extension__
extern __inline uint32x4_t
6829 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6830 __arm_vbicq_m_n_u32 (uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
6832 return __builtin_mve_vbicq_m_n_uv4si (__a
, __imm
, __p
);
6835 __extension__
extern __inline int8x16_t
6836 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6837 __arm_vqrshrnbq_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
)
6839 return __builtin_mve_vqrshrnbq_n_sv8hi (__a
, __b
, __imm
);
6842 __extension__
extern __inline uint8x16_t
6843 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6844 __arm_vqrshrnbq_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
6846 return __builtin_mve_vqrshrnbq_n_uv8hi (__a
, __b
, __imm
);
6849 __extension__
extern __inline int16x8_t
6850 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6851 __arm_vqrshrnbq_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
)
6853 return __builtin_mve_vqrshrnbq_n_sv4si (__a
, __b
, __imm
);
6856 __extension__
extern __inline uint16x8_t
6857 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6858 __arm_vqrshrnbq_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
6860 return __builtin_mve_vqrshrnbq_n_uv4si (__a
, __b
, __imm
);
6863 __extension__
extern __inline uint8x16_t
6864 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6865 __arm_vqrshrunbq_n_s16 (uint8x16_t __a
, int16x8_t __b
, const int __imm
)
6867 return __builtin_mve_vqrshrunbq_n_sv8hi (__a
, __b
, __imm
);
6870 __extension__
extern __inline uint16x8_t
6871 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6872 __arm_vqrshrunbq_n_s32 (uint16x8_t __a
, int32x4_t __b
, const int __imm
)
6874 return __builtin_mve_vqrshrunbq_n_sv4si (__a
, __b
, __imm
);
6877 __extension__
extern __inline
int64_t
6878 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6879 __arm_vrmlaldavhaq_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
6881 return __builtin_mve_vrmlaldavhaq_sv4si (__a
, __b
, __c
);
6884 __extension__
extern __inline
uint64_t
6885 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6886 __arm_vrmlaldavhaq_u32 (uint64_t __a
, uint32x4_t __b
, uint32x4_t __c
)
6888 return __builtin_mve_vrmlaldavhaq_uv4si (__a
, __b
, __c
);
6891 __extension__
extern __inline int8x16_t
6892 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6893 __arm_vshlcq_s8 (int8x16_t __a
, uint32_t * __b
, const int __imm
)
6895 int8x16_t __res
= __builtin_mve_vshlcq_vec_sv16qi (__a
, *__b
, __imm
);
6896 *__b
= __builtin_mve_vshlcq_carry_sv16qi (__a
, *__b
, __imm
);
6900 __extension__
extern __inline uint8x16_t
6901 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6902 __arm_vshlcq_u8 (uint8x16_t __a
, uint32_t * __b
, const int __imm
)
6904 uint8x16_t __res
= __builtin_mve_vshlcq_vec_uv16qi (__a
, *__b
, __imm
);
6905 *__b
= __builtin_mve_vshlcq_carry_uv16qi (__a
, *__b
, __imm
);
6909 __extension__
extern __inline int16x8_t
6910 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6911 __arm_vshlcq_s16 (int16x8_t __a
, uint32_t * __b
, const int __imm
)
6913 int16x8_t __res
= __builtin_mve_vshlcq_vec_sv8hi (__a
, *__b
, __imm
);
6914 *__b
= __builtin_mve_vshlcq_carry_sv8hi (__a
, *__b
, __imm
);
6918 __extension__
extern __inline uint16x8_t
6919 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6920 __arm_vshlcq_u16 (uint16x8_t __a
, uint32_t * __b
, const int __imm
)
6922 uint16x8_t __res
= __builtin_mve_vshlcq_vec_uv8hi (__a
, *__b
, __imm
);
6923 *__b
= __builtin_mve_vshlcq_carry_uv8hi (__a
, *__b
, __imm
);
6927 __extension__
extern __inline int32x4_t
6928 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6929 __arm_vshlcq_s32 (int32x4_t __a
, uint32_t * __b
, const int __imm
)
6931 int32x4_t __res
= __builtin_mve_vshlcq_vec_sv4si (__a
, *__b
, __imm
);
6932 *__b
= __builtin_mve_vshlcq_carry_sv4si (__a
, *__b
, __imm
);
6936 __extension__
extern __inline uint32x4_t
6937 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6938 __arm_vshlcq_u32 (uint32x4_t __a
, uint32_t * __b
, const int __imm
)
6940 uint32x4_t __res
= __builtin_mve_vshlcq_vec_uv4si (__a
, *__b
, __imm
);
6941 *__b
= __builtin_mve_vshlcq_carry_uv4si (__a
, *__b
, __imm
);
6945 __extension__
extern __inline uint8x16_t
6946 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6947 __arm_vpselq_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
6949 return __builtin_mve_vpselq_uv16qi (__a
, __b
, __p
);
6952 __extension__
extern __inline int8x16_t
6953 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6954 __arm_vpselq_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
6956 return __builtin_mve_vpselq_sv16qi (__a
, __b
, __p
);
6959 __extension__
extern __inline uint8x16_t
6960 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6961 __arm_vrev64q_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, mve_pred16_t __p
)
6963 return __builtin_mve_vrev64q_m_uv16qi (__inactive
, __a
, __p
);
6966 __extension__
extern __inline uint8x16_t
6967 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6968 __arm_vmvnq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, mve_pred16_t __p
)
6970 return __builtin_mve_vmvnq_m_uv16qi (__inactive
, __a
, __p
);
6973 __extension__
extern __inline uint8x16_t
6974 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6975 __arm_vmlasq_n_u8 (uint8x16_t __a
, uint8x16_t __b
, uint8_t __c
)
6977 return __builtin_mve_vmlasq_n_uv16qi (__a
, __b
, __c
);
6980 __extension__
extern __inline uint8x16_t
6981 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6982 __arm_vmlaq_n_u8 (uint8x16_t __a
, uint8x16_t __b
, uint8_t __c
)
6984 return __builtin_mve_vmlaq_n_uv16qi (__a
, __b
, __c
);
6987 __extension__
extern __inline
uint32_t
6988 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6989 __arm_vmladavq_p_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
6991 return __builtin_mve_vmladavq_p_uv16qi (__a
, __b
, __p
);
6994 __extension__
extern __inline
uint32_t
6995 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
6996 __arm_vmladavaq_u8 (uint32_t __a
, uint8x16_t __b
, uint8x16_t __c
)
6998 return __builtin_mve_vmladavaq_uv16qi (__a
, __b
, __c
);
7001 __extension__
extern __inline
uint8_t
7002 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7003 __arm_vminvq_p_u8 (uint8_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
7005 return __builtin_mve_vminvq_p_uv16qi (__a
, __b
, __p
);
7008 __extension__
extern __inline
uint8_t
7009 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7010 __arm_vmaxvq_p_u8 (uint8_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
7012 return __builtin_mve_vmaxvq_p_uv16qi (__a
, __b
, __p
);
7015 __extension__
extern __inline uint8x16_t
7016 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7017 __arm_vdupq_m_n_u8 (uint8x16_t __inactive
, uint8_t __a
, mve_pred16_t __p
)
7019 return __builtin_mve_vdupq_m_n_uv16qi (__inactive
, __a
, __p
);
7022 __extension__
extern __inline mve_pred16_t
7023 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7024 __arm_vcmpneq_m_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
7026 return __builtin_mve_vcmpneq_m_uv16qi (__a
, __b
, __p
);
7029 __extension__
extern __inline mve_pred16_t
7030 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7031 __arm_vcmpneq_m_n_u8 (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
7033 return __builtin_mve_vcmpneq_m_n_uv16qi (__a
, __b
, __p
);
7036 __extension__
extern __inline mve_pred16_t
7037 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7038 __arm_vcmphiq_m_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
7040 return __builtin_mve_vcmphiq_m_uv16qi (__a
, __b
, __p
);
7043 __extension__
extern __inline mve_pred16_t
7044 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7045 __arm_vcmphiq_m_n_u8 (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
7047 return __builtin_mve_vcmphiq_m_n_uv16qi (__a
, __b
, __p
);
7050 __extension__
extern __inline mve_pred16_t
7051 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7052 __arm_vcmpeqq_m_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
7054 return __builtin_mve_vcmpeqq_m_uv16qi (__a
, __b
, __p
);
7057 __extension__
extern __inline mve_pred16_t
7058 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7059 __arm_vcmpeqq_m_n_u8 (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
7061 return __builtin_mve_vcmpeqq_m_n_uv16qi (__a
, __b
, __p
);
7064 __extension__
extern __inline mve_pred16_t
7065 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7066 __arm_vcmpcsq_m_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
7068 return __builtin_mve_vcmpcsq_m_uv16qi (__a
, __b
, __p
);
7071 __extension__
extern __inline mve_pred16_t
7072 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7073 __arm_vcmpcsq_m_n_u8 (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
7075 return __builtin_mve_vcmpcsq_m_n_uv16qi (__a
, __b
, __p
);
7078 __extension__
extern __inline uint8x16_t
7079 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7080 __arm_vclzq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, mve_pred16_t __p
)
7082 return __builtin_mve_vclzq_m_uv16qi (__inactive
, __a
, __p
);
7085 __extension__
extern __inline
uint32_t
7086 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7087 __arm_vaddvaq_p_u8 (uint32_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
7089 return __builtin_mve_vaddvaq_p_uv16qi (__a
, __b
, __p
);
7092 __extension__
extern __inline uint8x16_t
7093 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7094 __arm_vsriq_n_u8 (uint8x16_t __a
, uint8x16_t __b
, const int __imm
)
7096 return __builtin_mve_vsriq_n_uv16qi (__a
, __b
, __imm
);
7099 __extension__
extern __inline uint8x16_t
7100 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7101 __arm_vsliq_n_u8 (uint8x16_t __a
, uint8x16_t __b
, const int __imm
)
7103 return __builtin_mve_vsliq_n_uv16qi (__a
, __b
, __imm
);
7106 __extension__
extern __inline uint8x16_t
7107 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7108 __arm_vshlq_m_r_u8 (uint8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
7110 return __builtin_mve_vshlq_m_r_uv16qi (__a
, __b
, __p
);
7113 __extension__
extern __inline uint8x16_t
7114 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7115 __arm_vrshlq_m_n_u8 (uint8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
7117 return __builtin_mve_vrshlq_m_n_uv16qi (__a
, __b
, __p
);
7120 __extension__
extern __inline uint8x16_t
7121 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7122 __arm_vqshlq_m_r_u8 (uint8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
7124 return __builtin_mve_vqshlq_m_r_uv16qi (__a
, __b
, __p
);
7127 __extension__
extern __inline uint8x16_t
7128 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7129 __arm_vqrshlq_m_n_u8 (uint8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
7131 return __builtin_mve_vqrshlq_m_n_uv16qi (__a
, __b
, __p
);
7134 __extension__
extern __inline
uint8_t
7135 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7136 __arm_vminavq_p_s8 (uint8_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7138 return __builtin_mve_vminavq_p_sv16qi (__a
, __b
, __p
);
7141 __extension__
extern __inline uint8x16_t
7142 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7143 __arm_vminaq_m_s8 (uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7145 return __builtin_mve_vminaq_m_sv16qi (__a
, __b
, __p
);
7148 __extension__
extern __inline
uint8_t
7149 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7150 __arm_vmaxavq_p_s8 (uint8_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7152 return __builtin_mve_vmaxavq_p_sv16qi (__a
, __b
, __p
);
7155 __extension__
extern __inline uint8x16_t
7156 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7157 __arm_vmaxaq_m_s8 (uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7159 return __builtin_mve_vmaxaq_m_sv16qi (__a
, __b
, __p
);
7162 __extension__
extern __inline mve_pred16_t
7163 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7164 __arm_vcmpneq_m_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7166 return __builtin_mve_vcmpneq_m_sv16qi (__a
, __b
, __p
);
7169 __extension__
extern __inline mve_pred16_t
7170 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7171 __arm_vcmpneq_m_n_s8 (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
7173 return __builtin_mve_vcmpneq_m_n_sv16qi (__a
, __b
, __p
);
7176 __extension__
extern __inline mve_pred16_t
7177 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7178 __arm_vcmpltq_m_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7180 return __builtin_mve_vcmpltq_m_sv16qi (__a
, __b
, __p
);
7183 __extension__
extern __inline mve_pred16_t
7184 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7185 __arm_vcmpltq_m_n_s8 (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
7187 return __builtin_mve_vcmpltq_m_n_sv16qi (__a
, __b
, __p
);
7190 __extension__
extern __inline mve_pred16_t
7191 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7192 __arm_vcmpleq_m_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7194 return __builtin_mve_vcmpleq_m_sv16qi (__a
, __b
, __p
);
7197 __extension__
extern __inline mve_pred16_t
7198 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7199 __arm_vcmpleq_m_n_s8 (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
7201 return __builtin_mve_vcmpleq_m_n_sv16qi (__a
, __b
, __p
);
7204 __extension__
extern __inline mve_pred16_t
7205 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7206 __arm_vcmpgtq_m_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7208 return __builtin_mve_vcmpgtq_m_sv16qi (__a
, __b
, __p
);
7211 __extension__
extern __inline mve_pred16_t
7212 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7213 __arm_vcmpgtq_m_n_s8 (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
7215 return __builtin_mve_vcmpgtq_m_n_sv16qi (__a
, __b
, __p
);
7218 __extension__
extern __inline mve_pred16_t
7219 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7220 __arm_vcmpgeq_m_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7222 return __builtin_mve_vcmpgeq_m_sv16qi (__a
, __b
, __p
);
7225 __extension__
extern __inline mve_pred16_t
7226 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7227 __arm_vcmpgeq_m_n_s8 (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
7229 return __builtin_mve_vcmpgeq_m_n_sv16qi (__a
, __b
, __p
);
7232 __extension__
extern __inline mve_pred16_t
7233 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7234 __arm_vcmpeqq_m_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7236 return __builtin_mve_vcmpeqq_m_sv16qi (__a
, __b
, __p
);
7239 __extension__
extern __inline mve_pred16_t
7240 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7241 __arm_vcmpeqq_m_n_s8 (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
7243 return __builtin_mve_vcmpeqq_m_n_sv16qi (__a
, __b
, __p
);
7246 __extension__
extern __inline int8x16_t
7247 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7248 __arm_vshlq_m_r_s8 (int8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
7250 return __builtin_mve_vshlq_m_r_sv16qi (__a
, __b
, __p
);
7253 __extension__
extern __inline int8x16_t
7254 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7255 __arm_vrshlq_m_n_s8 (int8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
7257 return __builtin_mve_vrshlq_m_n_sv16qi (__a
, __b
, __p
);
7260 __extension__
extern __inline int8x16_t
7261 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7262 __arm_vrev64q_m_s8 (int8x16_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
7264 return __builtin_mve_vrev64q_m_sv16qi (__inactive
, __a
, __p
);
7267 __extension__
extern __inline int8x16_t
7268 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7269 __arm_vqshlq_m_r_s8 (int8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
7271 return __builtin_mve_vqshlq_m_r_sv16qi (__a
, __b
, __p
);
7274 __extension__
extern __inline int8x16_t
7275 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7276 __arm_vqrshlq_m_n_s8 (int8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
7278 return __builtin_mve_vqrshlq_m_n_sv16qi (__a
, __b
, __p
);
7281 __extension__
extern __inline int8x16_t
7282 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7283 __arm_vqnegq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
7285 return __builtin_mve_vqnegq_m_sv16qi (__inactive
, __a
, __p
);
7288 __extension__
extern __inline int8x16_t
7289 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7290 __arm_vqabsq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
7292 return __builtin_mve_vqabsq_m_sv16qi (__inactive
, __a
, __p
);
7295 __extension__
extern __inline int8x16_t
7296 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7297 __arm_vnegq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
7299 return __builtin_mve_vnegq_m_sv16qi (__inactive
, __a
, __p
);
7303 __extension__
extern __inline int8x16_t
7304 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7305 __arm_vmvnq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
7307 return __builtin_mve_vmvnq_m_sv16qi (__inactive
, __a
, __p
);
7310 __extension__
extern __inline
int32_t
7311 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7312 __arm_vmlsdavxq_p_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7314 return __builtin_mve_vmlsdavxq_p_sv16qi (__a
, __b
, __p
);
7317 __extension__
extern __inline
int32_t
7318 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7319 __arm_vmlsdavq_p_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7321 return __builtin_mve_vmlsdavq_p_sv16qi (__a
, __b
, __p
);
7324 __extension__
extern __inline
int32_t
7325 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7326 __arm_vmladavxq_p_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7328 return __builtin_mve_vmladavxq_p_sv16qi (__a
, __b
, __p
);
7331 __extension__
extern __inline
int32_t
7332 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7333 __arm_vmladavq_p_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7335 return __builtin_mve_vmladavq_p_sv16qi (__a
, __b
, __p
);
7338 __extension__
extern __inline
int8_t
7339 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7340 __arm_vminvq_p_s8 (int8_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7342 return __builtin_mve_vminvq_p_sv16qi (__a
, __b
, __p
);
7345 __extension__
extern __inline
int8_t
7346 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7347 __arm_vmaxvq_p_s8 (int8_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7349 return __builtin_mve_vmaxvq_p_sv16qi (__a
, __b
, __p
);
7352 __extension__
extern __inline int8x16_t
7353 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7354 __arm_vdupq_m_n_s8 (int8x16_t __inactive
, int8_t __a
, mve_pred16_t __p
)
7356 return __builtin_mve_vdupq_m_n_sv16qi (__inactive
, __a
, __p
);
7359 __extension__
extern __inline int8x16_t
7360 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7361 __arm_vclzq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
7363 return __builtin_mve_vclzq_m_sv16qi (__inactive
, __a
, __p
);
7366 __extension__
extern __inline int8x16_t
7367 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7368 __arm_vclsq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
7370 return __builtin_mve_vclsq_m_sv16qi (__inactive
, __a
, __p
);
7373 __extension__
extern __inline
int32_t
7374 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7375 __arm_vaddvaq_p_s8 (int32_t __a
, int8x16_t __b
, mve_pred16_t __p
)
7377 return __builtin_mve_vaddvaq_p_sv16qi (__a
, __b
, __p
);
7380 __extension__
extern __inline int8x16_t
7381 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7382 __arm_vabsq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
7384 return __builtin_mve_vabsq_m_sv16qi (__inactive
, __a
, __p
);
7387 __extension__
extern __inline int8x16_t
7388 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7389 __arm_vqrdmlsdhxq_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
)
7391 return __builtin_mve_vqrdmlsdhxq_sv16qi (__inactive
, __a
, __b
);
7394 __extension__
extern __inline int8x16_t
7395 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7396 __arm_vqrdmlsdhq_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
)
7398 return __builtin_mve_vqrdmlsdhq_sv16qi (__inactive
, __a
, __b
);
7401 __extension__
extern __inline int8x16_t
7402 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7403 __arm_vqrdmlashq_n_s8 (int8x16_t __a
, int8x16_t __b
, int8_t __c
)
7405 return __builtin_mve_vqrdmlashq_n_sv16qi (__a
, __b
, __c
);
7408 __extension__
extern __inline int8x16_t
7409 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7410 __arm_vqdmlashq_n_s8 (int8x16_t __a
, int8x16_t __b
, int8_t __c
)
7412 return __builtin_mve_vqdmlashq_n_sv16qi (__a
, __b
, __c
);
7415 __extension__
extern __inline int8x16_t
7416 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7417 __arm_vqrdmlahq_n_s8 (int8x16_t __a
, int8x16_t __b
, int8_t __c
)
7419 return __builtin_mve_vqrdmlahq_n_sv16qi (__a
, __b
, __c
);
7422 __extension__
extern __inline int8x16_t
7423 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7424 __arm_vqrdmladhxq_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
)
7426 return __builtin_mve_vqrdmladhxq_sv16qi (__inactive
, __a
, __b
);
7429 __extension__
extern __inline int8x16_t
7430 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7431 __arm_vqrdmladhq_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
)
7433 return __builtin_mve_vqrdmladhq_sv16qi (__inactive
, __a
, __b
);
7436 __extension__
extern __inline int8x16_t
7437 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7438 __arm_vqdmlsdhxq_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
)
7440 return __builtin_mve_vqdmlsdhxq_sv16qi (__inactive
, __a
, __b
);
7443 __extension__
extern __inline int8x16_t
7444 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7445 __arm_vqdmlsdhq_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
)
7447 return __builtin_mve_vqdmlsdhq_sv16qi (__inactive
, __a
, __b
);
7450 __extension__
extern __inline int8x16_t
7451 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7452 __arm_vqdmlahq_n_s8 (int8x16_t __a
, int8x16_t __b
, int8_t __c
)
7454 return __builtin_mve_vqdmlahq_n_sv16qi (__a
, __b
, __c
);
7457 __extension__
extern __inline int8x16_t
7458 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7459 __arm_vqdmladhxq_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
)
7461 return __builtin_mve_vqdmladhxq_sv16qi (__inactive
, __a
, __b
);
7464 __extension__
extern __inline int8x16_t
7465 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7466 __arm_vqdmladhq_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
)
7468 return __builtin_mve_vqdmladhq_sv16qi (__inactive
, __a
, __b
);
7471 __extension__
extern __inline
int32_t
7472 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7473 __arm_vmlsdavaxq_s8 (int32_t __a
, int8x16_t __b
, int8x16_t __c
)
7475 return __builtin_mve_vmlsdavaxq_sv16qi (__a
, __b
, __c
);
7478 __extension__
extern __inline
int32_t
7479 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7480 __arm_vmlsdavaq_s8 (int32_t __a
, int8x16_t __b
, int8x16_t __c
)
7482 return __builtin_mve_vmlsdavaq_sv16qi (__a
, __b
, __c
);
7485 __extension__
extern __inline int8x16_t
7486 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7487 __arm_vmlasq_n_s8 (int8x16_t __a
, int8x16_t __b
, int8_t __c
)
7489 return __builtin_mve_vmlasq_n_sv16qi (__a
, __b
, __c
);
7492 __extension__
extern __inline int8x16_t
7493 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7494 __arm_vmlaq_n_s8 (int8x16_t __a
, int8x16_t __b
, int8_t __c
)
7496 return __builtin_mve_vmlaq_n_sv16qi (__a
, __b
, __c
);
7499 __extension__
extern __inline
int32_t
7500 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7501 __arm_vmladavaxq_s8 (int32_t __a
, int8x16_t __b
, int8x16_t __c
)
7503 return __builtin_mve_vmladavaxq_sv16qi (__a
, __b
, __c
);
7506 __extension__
extern __inline
int32_t
7507 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7508 __arm_vmladavaq_s8 (int32_t __a
, int8x16_t __b
, int8x16_t __c
)
7510 return __builtin_mve_vmladavaq_sv16qi (__a
, __b
, __c
);
7513 __extension__
extern __inline int8x16_t
7514 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7515 __arm_vsriq_n_s8 (int8x16_t __a
, int8x16_t __b
, const int __imm
)
7517 return __builtin_mve_vsriq_n_sv16qi (__a
, __b
, __imm
);
7520 __extension__
extern __inline int8x16_t
7521 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7522 __arm_vsliq_n_s8 (int8x16_t __a
, int8x16_t __b
, const int __imm
)
7524 return __builtin_mve_vsliq_n_sv16qi (__a
, __b
, __imm
);
7527 __extension__
extern __inline uint16x8_t
7528 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7529 __arm_vpselq_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
7531 return __builtin_mve_vpselq_uv8hi (__a
, __b
, __p
);
7534 __extension__
extern __inline int16x8_t
7535 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7536 __arm_vpselq_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7538 return __builtin_mve_vpselq_sv8hi (__a
, __b
, __p
);
7541 __extension__
extern __inline uint16x8_t
7542 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7543 __arm_vrev64q_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
7545 return __builtin_mve_vrev64q_m_uv8hi (__inactive
, __a
, __p
);
7548 __extension__
extern __inline uint16x8_t
7549 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7550 __arm_vmvnq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
7552 return __builtin_mve_vmvnq_m_uv8hi (__inactive
, __a
, __p
);
7555 __extension__
extern __inline uint16x8_t
7556 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7557 __arm_vmlasq_n_u16 (uint16x8_t __a
, uint16x8_t __b
, uint16_t __c
)
7559 return __builtin_mve_vmlasq_n_uv8hi (__a
, __b
, __c
);
7562 __extension__
extern __inline uint16x8_t
7563 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7564 __arm_vmlaq_n_u16 (uint16x8_t __a
, uint16x8_t __b
, uint16_t __c
)
7566 return __builtin_mve_vmlaq_n_uv8hi (__a
, __b
, __c
);
7569 __extension__
extern __inline
uint32_t
7570 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7571 __arm_vmladavq_p_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
7573 return __builtin_mve_vmladavq_p_uv8hi (__a
, __b
, __p
);
7576 __extension__
extern __inline
uint32_t
7577 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7578 __arm_vmladavaq_u16 (uint32_t __a
, uint16x8_t __b
, uint16x8_t __c
)
7580 return __builtin_mve_vmladavaq_uv8hi (__a
, __b
, __c
);
7583 __extension__
extern __inline
uint16_t
7584 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7585 __arm_vminvq_p_u16 (uint16_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
7587 return __builtin_mve_vminvq_p_uv8hi (__a
, __b
, __p
);
7590 __extension__
extern __inline
uint16_t
7591 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7592 __arm_vmaxvq_p_u16 (uint16_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
7594 return __builtin_mve_vmaxvq_p_uv8hi (__a
, __b
, __p
);
7597 __extension__
extern __inline uint16x8_t
7598 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7599 __arm_vdupq_m_n_u16 (uint16x8_t __inactive
, uint16_t __a
, mve_pred16_t __p
)
7601 return __builtin_mve_vdupq_m_n_uv8hi (__inactive
, __a
, __p
);
7604 __extension__
extern __inline mve_pred16_t
7605 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7606 __arm_vcmpneq_m_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
7608 return __builtin_mve_vcmpneq_m_uv8hi (__a
, __b
, __p
);
7611 __extension__
extern __inline mve_pred16_t
7612 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7613 __arm_vcmpneq_m_n_u16 (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
7615 return __builtin_mve_vcmpneq_m_n_uv8hi (__a
, __b
, __p
);
7618 __extension__
extern __inline mve_pred16_t
7619 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7620 __arm_vcmphiq_m_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
7622 return __builtin_mve_vcmphiq_m_uv8hi (__a
, __b
, __p
);
7625 __extension__
extern __inline mve_pred16_t
7626 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7627 __arm_vcmphiq_m_n_u16 (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
7629 return __builtin_mve_vcmphiq_m_n_uv8hi (__a
, __b
, __p
);
7632 __extension__
extern __inline mve_pred16_t
7633 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7634 __arm_vcmpeqq_m_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
7636 return __builtin_mve_vcmpeqq_m_uv8hi (__a
, __b
, __p
);
7639 __extension__
extern __inline mve_pred16_t
7640 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7641 __arm_vcmpeqq_m_n_u16 (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
7643 return __builtin_mve_vcmpeqq_m_n_uv8hi (__a
, __b
, __p
);
7646 __extension__
extern __inline mve_pred16_t
7647 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7648 __arm_vcmpcsq_m_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
7650 return __builtin_mve_vcmpcsq_m_uv8hi (__a
, __b
, __p
);
7653 __extension__
extern __inline mve_pred16_t
7654 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7655 __arm_vcmpcsq_m_n_u16 (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
7657 return __builtin_mve_vcmpcsq_m_n_uv8hi (__a
, __b
, __p
);
7660 __extension__
extern __inline uint16x8_t
7661 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7662 __arm_vclzq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
7664 return __builtin_mve_vclzq_m_uv8hi (__inactive
, __a
, __p
);
7667 __extension__
extern __inline
uint32_t
7668 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7669 __arm_vaddvaq_p_u16 (uint32_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
7671 return __builtin_mve_vaddvaq_p_uv8hi (__a
, __b
, __p
);
7674 __extension__
extern __inline uint16x8_t
7675 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7676 __arm_vsriq_n_u16 (uint16x8_t __a
, uint16x8_t __b
, const int __imm
)
7678 return __builtin_mve_vsriq_n_uv8hi (__a
, __b
, __imm
);
7681 __extension__
extern __inline uint16x8_t
7682 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7683 __arm_vsliq_n_u16 (uint16x8_t __a
, uint16x8_t __b
, const int __imm
)
7685 return __builtin_mve_vsliq_n_uv8hi (__a
, __b
, __imm
);
7688 __extension__
extern __inline uint16x8_t
7689 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7690 __arm_vshlq_m_r_u16 (uint16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
7692 return __builtin_mve_vshlq_m_r_uv8hi (__a
, __b
, __p
);
7695 __extension__
extern __inline uint16x8_t
7696 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7697 __arm_vrshlq_m_n_u16 (uint16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
7699 return __builtin_mve_vrshlq_m_n_uv8hi (__a
, __b
, __p
);
7702 __extension__
extern __inline uint16x8_t
7703 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7704 __arm_vqshlq_m_r_u16 (uint16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
7706 return __builtin_mve_vqshlq_m_r_uv8hi (__a
, __b
, __p
);
7709 __extension__
extern __inline uint16x8_t
7710 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7711 __arm_vqrshlq_m_n_u16 (uint16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
7713 return __builtin_mve_vqrshlq_m_n_uv8hi (__a
, __b
, __p
);
7716 __extension__
extern __inline
uint16_t
7717 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7718 __arm_vminavq_p_s16 (uint16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7720 return __builtin_mve_vminavq_p_sv8hi (__a
, __b
, __p
);
7723 __extension__
extern __inline uint16x8_t
7724 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7725 __arm_vminaq_m_s16 (uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7727 return __builtin_mve_vminaq_m_sv8hi (__a
, __b
, __p
);
7730 __extension__
extern __inline
uint16_t
7731 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7732 __arm_vmaxavq_p_s16 (uint16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7734 return __builtin_mve_vmaxavq_p_sv8hi (__a
, __b
, __p
);
7737 __extension__
extern __inline uint16x8_t
7738 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7739 __arm_vmaxaq_m_s16 (uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7741 return __builtin_mve_vmaxaq_m_sv8hi (__a
, __b
, __p
);
7744 __extension__
extern __inline mve_pred16_t
7745 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7746 __arm_vcmpneq_m_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7748 return __builtin_mve_vcmpneq_m_sv8hi (__a
, __b
, __p
);
7751 __extension__
extern __inline mve_pred16_t
7752 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7753 __arm_vcmpneq_m_n_s16 (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
7755 return __builtin_mve_vcmpneq_m_n_sv8hi (__a
, __b
, __p
);
7758 __extension__
extern __inline mve_pred16_t
7759 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7760 __arm_vcmpltq_m_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7762 return __builtin_mve_vcmpltq_m_sv8hi (__a
, __b
, __p
);
7765 __extension__
extern __inline mve_pred16_t
7766 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7767 __arm_vcmpltq_m_n_s16 (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
7769 return __builtin_mve_vcmpltq_m_n_sv8hi (__a
, __b
, __p
);
7772 __extension__
extern __inline mve_pred16_t
7773 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7774 __arm_vcmpleq_m_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7776 return __builtin_mve_vcmpleq_m_sv8hi (__a
, __b
, __p
);
7779 __extension__
extern __inline mve_pred16_t
7780 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7781 __arm_vcmpleq_m_n_s16 (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
7783 return __builtin_mve_vcmpleq_m_n_sv8hi (__a
, __b
, __p
);
7786 __extension__
extern __inline mve_pred16_t
7787 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7788 __arm_vcmpgtq_m_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7790 return __builtin_mve_vcmpgtq_m_sv8hi (__a
, __b
, __p
);
7793 __extension__
extern __inline mve_pred16_t
7794 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7795 __arm_vcmpgtq_m_n_s16 (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
7797 return __builtin_mve_vcmpgtq_m_n_sv8hi (__a
, __b
, __p
);
7800 __extension__
extern __inline mve_pred16_t
7801 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7802 __arm_vcmpgeq_m_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7804 return __builtin_mve_vcmpgeq_m_sv8hi (__a
, __b
, __p
);
7807 __extension__
extern __inline mve_pred16_t
7808 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7809 __arm_vcmpgeq_m_n_s16 (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
7811 return __builtin_mve_vcmpgeq_m_n_sv8hi (__a
, __b
, __p
);
7814 __extension__
extern __inline mve_pred16_t
7815 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7816 __arm_vcmpeqq_m_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7818 return __builtin_mve_vcmpeqq_m_sv8hi (__a
, __b
, __p
);
7821 __extension__
extern __inline mve_pred16_t
7822 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7823 __arm_vcmpeqq_m_n_s16 (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
7825 return __builtin_mve_vcmpeqq_m_n_sv8hi (__a
, __b
, __p
);
7828 __extension__
extern __inline int16x8_t
7829 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7830 __arm_vshlq_m_r_s16 (int16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
7832 return __builtin_mve_vshlq_m_r_sv8hi (__a
, __b
, __p
);
7835 __extension__
extern __inline int16x8_t
7836 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7837 __arm_vrshlq_m_n_s16 (int16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
7839 return __builtin_mve_vrshlq_m_n_sv8hi (__a
, __b
, __p
);
7842 __extension__
extern __inline int16x8_t
7843 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7844 __arm_vrev64q_m_s16 (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
7846 return __builtin_mve_vrev64q_m_sv8hi (__inactive
, __a
, __p
);
7849 __extension__
extern __inline int16x8_t
7850 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7851 __arm_vqshlq_m_r_s16 (int16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
7853 return __builtin_mve_vqshlq_m_r_sv8hi (__a
, __b
, __p
);
7856 __extension__
extern __inline int16x8_t
7857 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7858 __arm_vqrshlq_m_n_s16 (int16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
7860 return __builtin_mve_vqrshlq_m_n_sv8hi (__a
, __b
, __p
);
7863 __extension__
extern __inline int16x8_t
7864 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7865 __arm_vqnegq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
7867 return __builtin_mve_vqnegq_m_sv8hi (__inactive
, __a
, __p
);
7870 __extension__
extern __inline int16x8_t
7871 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7872 __arm_vqabsq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
7874 return __builtin_mve_vqabsq_m_sv8hi (__inactive
, __a
, __p
);
7877 __extension__
extern __inline int16x8_t
7878 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7879 __arm_vnegq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
7881 return __builtin_mve_vnegq_m_sv8hi (__inactive
, __a
, __p
);
7884 __extension__
extern __inline int16x8_t
7885 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7886 __arm_vmvnq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
7888 return __builtin_mve_vmvnq_m_sv8hi (__inactive
, __a
, __p
);
7891 __extension__
extern __inline
int32_t
7892 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7893 __arm_vmlsdavxq_p_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7895 return __builtin_mve_vmlsdavxq_p_sv8hi (__a
, __b
, __p
);
7898 __extension__
extern __inline
int32_t
7899 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7900 __arm_vmlsdavq_p_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7902 return __builtin_mve_vmlsdavq_p_sv8hi (__a
, __b
, __p
);
7905 __extension__
extern __inline
int32_t
7906 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7907 __arm_vmladavxq_p_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7909 return __builtin_mve_vmladavxq_p_sv8hi (__a
, __b
, __p
);
7912 __extension__
extern __inline
int32_t
7913 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7914 __arm_vmladavq_p_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7916 return __builtin_mve_vmladavq_p_sv8hi (__a
, __b
, __p
);
7919 __extension__
extern __inline
int16_t
7920 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7921 __arm_vminvq_p_s16 (int16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7923 return __builtin_mve_vminvq_p_sv8hi (__a
, __b
, __p
);
7926 __extension__
extern __inline
int16_t
7927 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7928 __arm_vmaxvq_p_s16 (int16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7930 return __builtin_mve_vmaxvq_p_sv8hi (__a
, __b
, __p
);
7933 __extension__
extern __inline int16x8_t
7934 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7935 __arm_vdupq_m_n_s16 (int16x8_t __inactive
, int16_t __a
, mve_pred16_t __p
)
7937 return __builtin_mve_vdupq_m_n_sv8hi (__inactive
, __a
, __p
);
7940 __extension__
extern __inline int16x8_t
7941 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7942 __arm_vclzq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
7944 return __builtin_mve_vclzq_m_sv8hi (__inactive
, __a
, __p
);
7947 __extension__
extern __inline int16x8_t
7948 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7949 __arm_vclsq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
7951 return __builtin_mve_vclsq_m_sv8hi (__inactive
, __a
, __p
);
7954 __extension__
extern __inline
int32_t
7955 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7956 __arm_vaddvaq_p_s16 (int32_t __a
, int16x8_t __b
, mve_pred16_t __p
)
7958 return __builtin_mve_vaddvaq_p_sv8hi (__a
, __b
, __p
);
7961 __extension__
extern __inline int16x8_t
7962 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7963 __arm_vabsq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
7965 return __builtin_mve_vabsq_m_sv8hi (__inactive
, __a
, __p
);
7968 __extension__
extern __inline int16x8_t
7969 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7970 __arm_vqrdmlsdhxq_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
7972 return __builtin_mve_vqrdmlsdhxq_sv8hi (__inactive
, __a
, __b
);
7975 __extension__
extern __inline int16x8_t
7976 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7977 __arm_vqrdmlsdhq_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
7979 return __builtin_mve_vqrdmlsdhq_sv8hi (__inactive
, __a
, __b
);
7982 __extension__
extern __inline int16x8_t
7983 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7984 __arm_vqrdmlashq_n_s16 (int16x8_t __a
, int16x8_t __b
, int16_t __c
)
7986 return __builtin_mve_vqrdmlashq_n_sv8hi (__a
, __b
, __c
);
7989 __extension__
extern __inline int16x8_t
7990 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7991 __arm_vqdmlashq_n_s16 (int16x8_t __a
, int16x8_t __b
, int16_t __c
)
7993 return __builtin_mve_vqdmlashq_n_sv8hi (__a
, __b
, __c
);
7996 __extension__
extern __inline int16x8_t
7997 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
7998 __arm_vqrdmlahq_n_s16 (int16x8_t __a
, int16x8_t __b
, int16_t __c
)
8000 return __builtin_mve_vqrdmlahq_n_sv8hi (__a
, __b
, __c
);
8003 __extension__
extern __inline int16x8_t
8004 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8005 __arm_vqrdmladhxq_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
8007 return __builtin_mve_vqrdmladhxq_sv8hi (__inactive
, __a
, __b
);
8010 __extension__
extern __inline int16x8_t
8011 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8012 __arm_vqrdmladhq_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
8014 return __builtin_mve_vqrdmladhq_sv8hi (__inactive
, __a
, __b
);
8017 __extension__
extern __inline int16x8_t
8018 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8019 __arm_vqdmlsdhxq_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
8021 return __builtin_mve_vqdmlsdhxq_sv8hi (__inactive
, __a
, __b
);
8024 __extension__
extern __inline int16x8_t
8025 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8026 __arm_vqdmlsdhq_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
8028 return __builtin_mve_vqdmlsdhq_sv8hi (__inactive
, __a
, __b
);
8031 __extension__
extern __inline int16x8_t
8032 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8033 __arm_vqdmlahq_n_s16 (int16x8_t __a
, int16x8_t __b
, int16_t __c
)
8035 return __builtin_mve_vqdmlahq_n_sv8hi (__a
, __b
, __c
);
8038 __extension__
extern __inline int16x8_t
8039 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8040 __arm_vqdmladhxq_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
8042 return __builtin_mve_vqdmladhxq_sv8hi (__inactive
, __a
, __b
);
8045 __extension__
extern __inline int16x8_t
8046 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8047 __arm_vqdmladhq_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
8049 return __builtin_mve_vqdmladhq_sv8hi (__inactive
, __a
, __b
);
8052 __extension__
extern __inline
int32_t
8053 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8054 __arm_vmlsdavaxq_s16 (int32_t __a
, int16x8_t __b
, int16x8_t __c
)
8056 return __builtin_mve_vmlsdavaxq_sv8hi (__a
, __b
, __c
);
8059 __extension__
extern __inline
int32_t
8060 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8061 __arm_vmlsdavaq_s16 (int32_t __a
, int16x8_t __b
, int16x8_t __c
)
8063 return __builtin_mve_vmlsdavaq_sv8hi (__a
, __b
, __c
);
8066 __extension__
extern __inline int16x8_t
8067 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8068 __arm_vmlasq_n_s16 (int16x8_t __a
, int16x8_t __b
, int16_t __c
)
8070 return __builtin_mve_vmlasq_n_sv8hi (__a
, __b
, __c
);
8073 __extension__
extern __inline int16x8_t
8074 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8075 __arm_vmlaq_n_s16 (int16x8_t __a
, int16x8_t __b
, int16_t __c
)
8077 return __builtin_mve_vmlaq_n_sv8hi (__a
, __b
, __c
);
8080 __extension__
extern __inline
int32_t
8081 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8082 __arm_vmladavaxq_s16 (int32_t __a
, int16x8_t __b
, int16x8_t __c
)
8084 return __builtin_mve_vmladavaxq_sv8hi (__a
, __b
, __c
);
8087 __extension__
extern __inline
int32_t
8088 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8089 __arm_vmladavaq_s16 (int32_t __a
, int16x8_t __b
, int16x8_t __c
)
8091 return __builtin_mve_vmladavaq_sv8hi (__a
, __b
, __c
);
8094 __extension__
extern __inline int16x8_t
8095 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8096 __arm_vsriq_n_s16 (int16x8_t __a
, int16x8_t __b
, const int __imm
)
8098 return __builtin_mve_vsriq_n_sv8hi (__a
, __b
, __imm
);
8101 __extension__
extern __inline int16x8_t
8102 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8103 __arm_vsliq_n_s16 (int16x8_t __a
, int16x8_t __b
, const int __imm
)
8105 return __builtin_mve_vsliq_n_sv8hi (__a
, __b
, __imm
);
8108 __extension__
extern __inline uint32x4_t
8109 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8110 __arm_vpselq_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
8112 return __builtin_mve_vpselq_uv4si (__a
, __b
, __p
);
8115 __extension__
extern __inline int32x4_t
8116 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8117 __arm_vpselq_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8119 return __builtin_mve_vpselq_sv4si (__a
, __b
, __p
);
8122 __extension__
extern __inline uint32x4_t
8123 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8124 __arm_vrev64q_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, mve_pred16_t __p
)
8126 return __builtin_mve_vrev64q_m_uv4si (__inactive
, __a
, __p
);
8129 __extension__
extern __inline uint32x4_t
8130 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8131 __arm_vmvnq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, mve_pred16_t __p
)
8133 return __builtin_mve_vmvnq_m_uv4si (__inactive
, __a
, __p
);
8136 __extension__
extern __inline uint32x4_t
8137 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8138 __arm_vmlasq_n_u32 (uint32x4_t __a
, uint32x4_t __b
, uint32_t __c
)
8140 return __builtin_mve_vmlasq_n_uv4si (__a
, __b
, __c
);
8143 __extension__
extern __inline uint32x4_t
8144 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8145 __arm_vmlaq_n_u32 (uint32x4_t __a
, uint32x4_t __b
, uint32_t __c
)
8147 return __builtin_mve_vmlaq_n_uv4si (__a
, __b
, __c
);
8150 __extension__
extern __inline
uint32_t
8151 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8152 __arm_vmladavq_p_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
8154 return __builtin_mve_vmladavq_p_uv4si (__a
, __b
, __p
);
8157 __extension__
extern __inline
uint32_t
8158 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8159 __arm_vmladavaq_u32 (uint32_t __a
, uint32x4_t __b
, uint32x4_t __c
)
8161 return __builtin_mve_vmladavaq_uv4si (__a
, __b
, __c
);
8164 __extension__
extern __inline
uint32_t
8165 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8166 __arm_vminvq_p_u32 (uint32_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
8168 return __builtin_mve_vminvq_p_uv4si (__a
, __b
, __p
);
8171 __extension__
extern __inline
uint32_t
8172 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8173 __arm_vmaxvq_p_u32 (uint32_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
8175 return __builtin_mve_vmaxvq_p_uv4si (__a
, __b
, __p
);
8178 __extension__
extern __inline uint32x4_t
8179 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8180 __arm_vdupq_m_n_u32 (uint32x4_t __inactive
, uint32_t __a
, mve_pred16_t __p
)
8182 return __builtin_mve_vdupq_m_n_uv4si (__inactive
, __a
, __p
);
8185 __extension__
extern __inline mve_pred16_t
8186 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8187 __arm_vcmpneq_m_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
8189 return __builtin_mve_vcmpneq_m_uv4si (__a
, __b
, __p
);
8192 __extension__
extern __inline mve_pred16_t
8193 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8194 __arm_vcmpneq_m_n_u32 (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
8196 return __builtin_mve_vcmpneq_m_n_uv4si (__a
, __b
, __p
);
8199 __extension__
extern __inline mve_pred16_t
8200 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8201 __arm_vcmphiq_m_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
8203 return __builtin_mve_vcmphiq_m_uv4si (__a
, __b
, __p
);
8206 __extension__
extern __inline mve_pred16_t
8207 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8208 __arm_vcmphiq_m_n_u32 (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
8210 return __builtin_mve_vcmphiq_m_n_uv4si (__a
, __b
, __p
);
8213 __extension__
extern __inline mve_pred16_t
8214 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8215 __arm_vcmpeqq_m_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
8217 return __builtin_mve_vcmpeqq_m_uv4si (__a
, __b
, __p
);
8220 __extension__
extern __inline mve_pred16_t
8221 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8222 __arm_vcmpeqq_m_n_u32 (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
8224 return __builtin_mve_vcmpeqq_m_n_uv4si (__a
, __b
, __p
);
8227 __extension__
extern __inline mve_pred16_t
8228 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8229 __arm_vcmpcsq_m_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
8231 return __builtin_mve_vcmpcsq_m_uv4si (__a
, __b
, __p
);
8234 __extension__
extern __inline mve_pred16_t
8235 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8236 __arm_vcmpcsq_m_n_u32 (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
8238 return __builtin_mve_vcmpcsq_m_n_uv4si (__a
, __b
, __p
);
8241 __extension__
extern __inline uint32x4_t
8242 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8243 __arm_vclzq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, mve_pred16_t __p
)
8245 return __builtin_mve_vclzq_m_uv4si (__inactive
, __a
, __p
);
8248 __extension__
extern __inline
uint32_t
8249 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8250 __arm_vaddvaq_p_u32 (uint32_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
8252 return __builtin_mve_vaddvaq_p_uv4si (__a
, __b
, __p
);
8255 __extension__
extern __inline uint32x4_t
8256 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8257 __arm_vsriq_n_u32 (uint32x4_t __a
, uint32x4_t __b
, const int __imm
)
8259 return __builtin_mve_vsriq_n_uv4si (__a
, __b
, __imm
);
8262 __extension__
extern __inline uint32x4_t
8263 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8264 __arm_vsliq_n_u32 (uint32x4_t __a
, uint32x4_t __b
, const int __imm
)
8266 return __builtin_mve_vsliq_n_uv4si (__a
, __b
, __imm
);
8269 __extension__
extern __inline uint32x4_t
8270 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8271 __arm_vshlq_m_r_u32 (uint32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8273 return __builtin_mve_vshlq_m_r_uv4si (__a
, __b
, __p
);
8276 __extension__
extern __inline uint32x4_t
8277 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8278 __arm_vrshlq_m_n_u32 (uint32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8280 return __builtin_mve_vrshlq_m_n_uv4si (__a
, __b
, __p
);
8283 __extension__
extern __inline uint32x4_t
8284 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8285 __arm_vqshlq_m_r_u32 (uint32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8287 return __builtin_mve_vqshlq_m_r_uv4si (__a
, __b
, __p
);
8290 __extension__
extern __inline uint32x4_t
8291 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8292 __arm_vqrshlq_m_n_u32 (uint32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8294 return __builtin_mve_vqrshlq_m_n_uv4si (__a
, __b
, __p
);
8297 __extension__
extern __inline
uint32_t
8298 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8299 __arm_vminavq_p_s32 (uint32_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8301 return __builtin_mve_vminavq_p_sv4si (__a
, __b
, __p
);
8304 __extension__
extern __inline uint32x4_t
8305 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8306 __arm_vminaq_m_s32 (uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8308 return __builtin_mve_vminaq_m_sv4si (__a
, __b
, __p
);
8311 __extension__
extern __inline
uint32_t
8312 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8313 __arm_vmaxavq_p_s32 (uint32_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8315 return __builtin_mve_vmaxavq_p_sv4si (__a
, __b
, __p
);
8318 __extension__
extern __inline uint32x4_t
8319 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8320 __arm_vmaxaq_m_s32 (uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8322 return __builtin_mve_vmaxaq_m_sv4si (__a
, __b
, __p
);
8325 __extension__
extern __inline mve_pred16_t
8326 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8327 __arm_vcmpneq_m_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8329 return __builtin_mve_vcmpneq_m_sv4si (__a
, __b
, __p
);
8332 __extension__
extern __inline mve_pred16_t
8333 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8334 __arm_vcmpneq_m_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8336 return __builtin_mve_vcmpneq_m_n_sv4si (__a
, __b
, __p
);
8339 __extension__
extern __inline mve_pred16_t
8340 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8341 __arm_vcmpltq_m_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8343 return __builtin_mve_vcmpltq_m_sv4si (__a
, __b
, __p
);
8346 __extension__
extern __inline mve_pred16_t
8347 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8348 __arm_vcmpltq_m_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8350 return __builtin_mve_vcmpltq_m_n_sv4si (__a
, __b
, __p
);
8353 __extension__
extern __inline mve_pred16_t
8354 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8355 __arm_vcmpleq_m_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8357 return __builtin_mve_vcmpleq_m_sv4si (__a
, __b
, __p
);
8360 __extension__
extern __inline mve_pred16_t
8361 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8362 __arm_vcmpleq_m_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8364 return __builtin_mve_vcmpleq_m_n_sv4si (__a
, __b
, __p
);
8367 __extension__
extern __inline mve_pred16_t
8368 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8369 __arm_vcmpgtq_m_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8371 return __builtin_mve_vcmpgtq_m_sv4si (__a
, __b
, __p
);
8374 __extension__
extern __inline mve_pred16_t
8375 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8376 __arm_vcmpgtq_m_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8378 return __builtin_mve_vcmpgtq_m_n_sv4si (__a
, __b
, __p
);
8381 __extension__
extern __inline mve_pred16_t
8382 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8383 __arm_vcmpgeq_m_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8385 return __builtin_mve_vcmpgeq_m_sv4si (__a
, __b
, __p
);
8388 __extension__
extern __inline mve_pred16_t
8389 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8390 __arm_vcmpgeq_m_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8392 return __builtin_mve_vcmpgeq_m_n_sv4si (__a
, __b
, __p
);
8395 __extension__
extern __inline mve_pred16_t
8396 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8397 __arm_vcmpeqq_m_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8399 return __builtin_mve_vcmpeqq_m_sv4si (__a
, __b
, __p
);
8402 __extension__
extern __inline mve_pred16_t
8403 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8404 __arm_vcmpeqq_m_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8406 return __builtin_mve_vcmpeqq_m_n_sv4si (__a
, __b
, __p
);
8409 __extension__
extern __inline int32x4_t
8410 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8411 __arm_vshlq_m_r_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8413 return __builtin_mve_vshlq_m_r_sv4si (__a
, __b
, __p
);
8416 __extension__
extern __inline int32x4_t
8417 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8418 __arm_vrshlq_m_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8420 return __builtin_mve_vrshlq_m_n_sv4si (__a
, __b
, __p
);
8423 __extension__
extern __inline int32x4_t
8424 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8425 __arm_vrev64q_m_s32 (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
8427 return __builtin_mve_vrev64q_m_sv4si (__inactive
, __a
, __p
);
8430 __extension__
extern __inline int32x4_t
8431 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8432 __arm_vqshlq_m_r_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8434 return __builtin_mve_vqshlq_m_r_sv4si (__a
, __b
, __p
);
8437 __extension__
extern __inline int32x4_t
8438 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8439 __arm_vqrshlq_m_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
8441 return __builtin_mve_vqrshlq_m_n_sv4si (__a
, __b
, __p
);
8444 __extension__
extern __inline int32x4_t
8445 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8446 __arm_vqnegq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
8448 return __builtin_mve_vqnegq_m_sv4si (__inactive
, __a
, __p
);
8451 __extension__
extern __inline int32x4_t
8452 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8453 __arm_vqabsq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
8455 return __builtin_mve_vqabsq_m_sv4si (__inactive
, __a
, __p
);
8458 __extension__
extern __inline int32x4_t
8459 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8460 __arm_vnegq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
8462 return __builtin_mve_vnegq_m_sv4si (__inactive
, __a
, __p
);
8465 __extension__
extern __inline int32x4_t
8466 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8467 __arm_vmvnq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
8469 return __builtin_mve_vmvnq_m_sv4si (__inactive
, __a
, __p
);
8472 __extension__
extern __inline
int32_t
8473 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8474 __arm_vmlsdavxq_p_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8476 return __builtin_mve_vmlsdavxq_p_sv4si (__a
, __b
, __p
);
8479 __extension__
extern __inline
int32_t
8480 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8481 __arm_vmlsdavq_p_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8483 return __builtin_mve_vmlsdavq_p_sv4si (__a
, __b
, __p
);
8486 __extension__
extern __inline
int32_t
8487 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8488 __arm_vmladavxq_p_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8490 return __builtin_mve_vmladavxq_p_sv4si (__a
, __b
, __p
);
8493 __extension__
extern __inline
int32_t
8494 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8495 __arm_vmladavq_p_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8497 return __builtin_mve_vmladavq_p_sv4si (__a
, __b
, __p
);
8500 __extension__
extern __inline
int32_t
8501 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8502 __arm_vminvq_p_s32 (int32_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8504 return __builtin_mve_vminvq_p_sv4si (__a
, __b
, __p
);
8507 __extension__
extern __inline
int32_t
8508 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8509 __arm_vmaxvq_p_s32 (int32_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8511 return __builtin_mve_vmaxvq_p_sv4si (__a
, __b
, __p
);
8514 __extension__
extern __inline int32x4_t
8515 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8516 __arm_vdupq_m_n_s32 (int32x4_t __inactive
, int32_t __a
, mve_pred16_t __p
)
8518 return __builtin_mve_vdupq_m_n_sv4si (__inactive
, __a
, __p
);
8521 __extension__
extern __inline int32x4_t
8522 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8523 __arm_vclzq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
8525 return __builtin_mve_vclzq_m_sv4si (__inactive
, __a
, __p
);
8528 __extension__
extern __inline int32x4_t
8529 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8530 __arm_vclsq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
8532 return __builtin_mve_vclsq_m_sv4si (__inactive
, __a
, __p
);
8535 __extension__
extern __inline
int32_t
8536 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8537 __arm_vaddvaq_p_s32 (int32_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8539 return __builtin_mve_vaddvaq_p_sv4si (__a
, __b
, __p
);
8542 __extension__
extern __inline int32x4_t
8543 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8544 __arm_vabsq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
8546 return __builtin_mve_vabsq_m_sv4si (__inactive
, __a
, __p
);
8549 __extension__
extern __inline int32x4_t
8550 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8551 __arm_vqrdmlsdhxq_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
8553 return __builtin_mve_vqrdmlsdhxq_sv4si (__inactive
, __a
, __b
);
8556 __extension__
extern __inline int32x4_t
8557 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8558 __arm_vqrdmlsdhq_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
8560 return __builtin_mve_vqrdmlsdhq_sv4si (__inactive
, __a
, __b
);
8563 __extension__
extern __inline int32x4_t
8564 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8565 __arm_vqrdmlashq_n_s32 (int32x4_t __a
, int32x4_t __b
, int32_t __c
)
8567 return __builtin_mve_vqrdmlashq_n_sv4si (__a
, __b
, __c
);
8570 __extension__
extern __inline int32x4_t
8571 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8572 __arm_vqdmlashq_n_s32 (int32x4_t __a
, int32x4_t __b
, int32_t __c
)
8574 return __builtin_mve_vqdmlashq_n_sv4si (__a
, __b
, __c
);
8577 __extension__
extern __inline int32x4_t
8578 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8579 __arm_vqrdmlahq_n_s32 (int32x4_t __a
, int32x4_t __b
, int32_t __c
)
8581 return __builtin_mve_vqrdmlahq_n_sv4si (__a
, __b
, __c
);
8584 __extension__
extern __inline int32x4_t
8585 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8586 __arm_vqrdmladhxq_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
8588 return __builtin_mve_vqrdmladhxq_sv4si (__inactive
, __a
, __b
);
8591 __extension__
extern __inline int32x4_t
8592 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8593 __arm_vqrdmladhq_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
8595 return __builtin_mve_vqrdmladhq_sv4si (__inactive
, __a
, __b
);
8598 __extension__
extern __inline int32x4_t
8599 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8600 __arm_vqdmlsdhxq_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
8602 return __builtin_mve_vqdmlsdhxq_sv4si (__inactive
, __a
, __b
);
8605 __extension__
extern __inline int32x4_t
8606 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8607 __arm_vqdmlsdhq_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
8609 return __builtin_mve_vqdmlsdhq_sv4si (__inactive
, __a
, __b
);
8612 __extension__
extern __inline int32x4_t
8613 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8614 __arm_vqdmlahq_n_s32 (int32x4_t __a
, int32x4_t __b
, int32_t __c
)
8616 return __builtin_mve_vqdmlahq_n_sv4si (__a
, __b
, __c
);
8619 __extension__
extern __inline int32x4_t
8620 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8621 __arm_vqdmladhxq_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
8623 return __builtin_mve_vqdmladhxq_sv4si (__inactive
, __a
, __b
);
8626 __extension__
extern __inline int32x4_t
8627 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8628 __arm_vqdmladhq_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
8630 return __builtin_mve_vqdmladhq_sv4si (__inactive
, __a
, __b
);
8633 __extension__
extern __inline
int32_t
8634 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8635 __arm_vmlsdavaxq_s32 (int32_t __a
, int32x4_t __b
, int32x4_t __c
)
8637 return __builtin_mve_vmlsdavaxq_sv4si (__a
, __b
, __c
);
8640 __extension__
extern __inline
int32_t
8641 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8642 __arm_vmlsdavaq_s32 (int32_t __a
, int32x4_t __b
, int32x4_t __c
)
8644 return __builtin_mve_vmlsdavaq_sv4si (__a
, __b
, __c
);
8647 __extension__
extern __inline int32x4_t
8648 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8649 __arm_vmlasq_n_s32 (int32x4_t __a
, int32x4_t __b
, int32_t __c
)
8651 return __builtin_mve_vmlasq_n_sv4si (__a
, __b
, __c
);
8654 __extension__
extern __inline int32x4_t
8655 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8656 __arm_vmlaq_n_s32 (int32x4_t __a
, int32x4_t __b
, int32_t __c
)
8658 return __builtin_mve_vmlaq_n_sv4si (__a
, __b
, __c
);
8661 __extension__
extern __inline
int32_t
8662 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8663 __arm_vmladavaxq_s32 (int32_t __a
, int32x4_t __b
, int32x4_t __c
)
8665 return __builtin_mve_vmladavaxq_sv4si (__a
, __b
, __c
);
8668 __extension__
extern __inline
int32_t
8669 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8670 __arm_vmladavaq_s32 (int32_t __a
, int32x4_t __b
, int32x4_t __c
)
8672 return __builtin_mve_vmladavaq_sv4si (__a
, __b
, __c
);
8675 __extension__
extern __inline int32x4_t
8676 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8677 __arm_vsriq_n_s32 (int32x4_t __a
, int32x4_t __b
, const int __imm
)
8679 return __builtin_mve_vsriq_n_sv4si (__a
, __b
, __imm
);
8682 __extension__
extern __inline int32x4_t
8683 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8684 __arm_vsliq_n_s32 (int32x4_t __a
, int32x4_t __b
, const int __imm
)
8686 return __builtin_mve_vsliq_n_sv4si (__a
, __b
, __imm
);
8689 __extension__
extern __inline uint64x2_t
8690 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8691 __arm_vpselq_u64 (uint64x2_t __a
, uint64x2_t __b
, mve_pred16_t __p
)
8693 return __builtin_mve_vpselq_uv2di (__a
, __b
, __p
);
8696 __extension__
extern __inline int64x2_t
8697 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8698 __arm_vpselq_s64 (int64x2_t __a
, int64x2_t __b
, mve_pred16_t __p
)
8700 return __builtin_mve_vpselq_sv2di (__a
, __b
, __p
);
8703 __extension__
extern __inline
int64_t
8704 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8705 __arm_vrmlaldavhaxq_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
8707 return __builtin_mve_vrmlaldavhaxq_sv4si (__a
, __b
, __c
);
8710 __extension__
extern __inline
int64_t
8711 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8712 __arm_vrmlsldavhaq_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
8714 return __builtin_mve_vrmlsldavhaq_sv4si (__a
, __b
, __c
);
8717 __extension__
extern __inline
int64_t
8718 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8719 __arm_vrmlsldavhaxq_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
8721 return __builtin_mve_vrmlsldavhaxq_sv4si (__a
, __b
, __c
);
8724 __extension__
extern __inline
int64_t
8725 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8726 __arm_vaddlvaq_p_s32 (int64_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8728 return __builtin_mve_vaddlvaq_p_sv4si (__a
, __b
, __p
);
8731 __extension__
extern __inline int8x16_t
8732 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8733 __arm_vrev16q_m_s8 (int8x16_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
8735 return __builtin_mve_vrev16q_m_sv16qi (__inactive
, __a
, __p
);
8738 __extension__
extern __inline
int64_t
8739 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8740 __arm_vrmlaldavhq_p_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8742 return __builtin_mve_vrmlaldavhq_p_sv4si (__a
, __b
, __p
);
8745 __extension__
extern __inline
int64_t
8746 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8747 __arm_vrmlaldavhxq_p_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8749 return __builtin_mve_vrmlaldavhxq_p_sv4si (__a
, __b
, __p
);
8752 __extension__
extern __inline
int64_t
8753 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8754 __arm_vrmlsldavhq_p_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8756 return __builtin_mve_vrmlsldavhq_p_sv4si (__a
, __b
, __p
);
8759 __extension__
extern __inline
int64_t
8760 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8761 __arm_vrmlsldavhxq_p_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
8763 return __builtin_mve_vrmlsldavhxq_p_sv4si (__a
, __b
, __p
);
8766 __extension__
extern __inline
uint64_t
8767 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8768 __arm_vaddlvaq_p_u32 (uint64_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
8770 return __builtin_mve_vaddlvaq_p_uv4si (__a
, __b
, __p
);
8773 __extension__
extern __inline uint8x16_t
8774 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8775 __arm_vrev16q_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, mve_pred16_t __p
)
8777 return __builtin_mve_vrev16q_m_uv16qi (__inactive
, __a
, __p
);
8780 __extension__
extern __inline
uint64_t
8781 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8782 __arm_vrmlaldavhq_p_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
8784 return __builtin_mve_vrmlaldavhq_p_uv4si (__a
, __b
, __p
);
8787 __extension__
extern __inline int16x8_t
8788 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8789 __arm_vmvnq_m_n_s16 (int16x8_t __inactive
, const int __imm
, mve_pred16_t __p
)
8791 return __builtin_mve_vmvnq_m_n_sv8hi (__inactive
, __imm
, __p
);
8794 __extension__
extern __inline int16x8_t
8795 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8796 __arm_vorrq_m_n_s16 (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
8798 return __builtin_mve_vorrq_m_n_sv8hi (__a
, __imm
, __p
);
8801 __extension__
extern __inline int8x16_t
8802 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8803 __arm_vqrshrntq_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
)
8805 return __builtin_mve_vqrshrntq_n_sv8hi (__a
, __b
, __imm
);
8808 __extension__
extern __inline int8x16_t
8809 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8810 __arm_vqshrnbq_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
)
8812 return __builtin_mve_vqshrnbq_n_sv8hi (__a
, __b
, __imm
);
8815 __extension__
extern __inline int8x16_t
8816 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8817 __arm_vqshrntq_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
)
8819 return __builtin_mve_vqshrntq_n_sv8hi (__a
, __b
, __imm
);
8822 __extension__
extern __inline int8x16_t
8823 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8824 __arm_vrshrnbq_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
)
8826 return __builtin_mve_vrshrnbq_n_sv8hi (__a
, __b
, __imm
);
8829 __extension__
extern __inline int8x16_t
8830 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8831 __arm_vrshrntq_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
)
8833 return __builtin_mve_vrshrntq_n_sv8hi (__a
, __b
, __imm
);
8836 __extension__
extern __inline int8x16_t
8837 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8838 __arm_vshrnbq_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
)
8840 return __builtin_mve_vshrnbq_n_sv8hi (__a
, __b
, __imm
);
8843 __extension__
extern __inline int8x16_t
8844 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8845 __arm_vshrntq_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
)
8847 return __builtin_mve_vshrntq_n_sv8hi (__a
, __b
, __imm
);
8850 __extension__
extern __inline
int64_t
8851 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8852 __arm_vmlaldavaq_s16 (int64_t __a
, int16x8_t __b
, int16x8_t __c
)
8854 return __builtin_mve_vmlaldavaq_sv8hi (__a
, __b
, __c
);
8857 __extension__
extern __inline
int64_t
8858 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8859 __arm_vmlaldavaxq_s16 (int64_t __a
, int16x8_t __b
, int16x8_t __c
)
8861 return __builtin_mve_vmlaldavaxq_sv8hi (__a
, __b
, __c
);
8864 __extension__
extern __inline
int64_t
8865 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8866 __arm_vmlsldavaq_s16 (int64_t __a
, int16x8_t __b
, int16x8_t __c
)
8868 return __builtin_mve_vmlsldavaq_sv8hi (__a
, __b
, __c
);
8871 __extension__
extern __inline
int64_t
8872 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8873 __arm_vmlsldavaxq_s16 (int64_t __a
, int16x8_t __b
, int16x8_t __c
)
8875 return __builtin_mve_vmlsldavaxq_sv8hi (__a
, __b
, __c
);
8878 __extension__
extern __inline
int64_t
8879 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8880 __arm_vmlaldavq_p_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
8882 return __builtin_mve_vmlaldavq_p_sv8hi (__a
, __b
, __p
);
8885 __extension__
extern __inline
int64_t
8886 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8887 __arm_vmlaldavxq_p_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
8889 return __builtin_mve_vmlaldavxq_p_sv8hi (__a
, __b
, __p
);
8892 __extension__
extern __inline
int64_t
8893 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8894 __arm_vmlsldavq_p_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
8896 return __builtin_mve_vmlsldavq_p_sv8hi (__a
, __b
, __p
);
8899 __extension__
extern __inline
int64_t
8900 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8901 __arm_vmlsldavxq_p_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
8903 return __builtin_mve_vmlsldavxq_p_sv8hi (__a
, __b
, __p
);
8906 __extension__
extern __inline int16x8_t
8907 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8908 __arm_vmovlbq_m_s8 (int16x8_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
8910 return __builtin_mve_vmovlbq_m_sv16qi (__inactive
, __a
, __p
);
8913 __extension__
extern __inline int16x8_t
8914 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8915 __arm_vmovltq_m_s8 (int16x8_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
8917 return __builtin_mve_vmovltq_m_sv16qi (__inactive
, __a
, __p
);
8920 __extension__
extern __inline int8x16_t
8921 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8922 __arm_vmovnbq_m_s16 (int8x16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
8924 return __builtin_mve_vmovnbq_m_sv8hi (__a
, __b
, __p
);
8927 __extension__
extern __inline int8x16_t
8928 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8929 __arm_vmovntq_m_s16 (int8x16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
8931 return __builtin_mve_vmovntq_m_sv8hi (__a
, __b
, __p
);
8934 __extension__
extern __inline int8x16_t
8935 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8936 __arm_vqmovnbq_m_s16 (int8x16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
8938 return __builtin_mve_vqmovnbq_m_sv8hi (__a
, __b
, __p
);
8941 __extension__
extern __inline int8x16_t
8942 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8943 __arm_vqmovntq_m_s16 (int8x16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
8945 return __builtin_mve_vqmovntq_m_sv8hi (__a
, __b
, __p
);
8948 __extension__
extern __inline int8x16_t
8949 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8950 __arm_vrev32q_m_s8 (int8x16_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
8952 return __builtin_mve_vrev32q_m_sv16qi (__inactive
, __a
, __p
);
8955 __extension__
extern __inline uint16x8_t
8956 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8957 __arm_vmvnq_m_n_u16 (uint16x8_t __inactive
, const int __imm
, mve_pred16_t __p
)
8959 return __builtin_mve_vmvnq_m_n_uv8hi (__inactive
, __imm
, __p
);
8962 __extension__
extern __inline uint16x8_t
8963 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8964 __arm_vorrq_m_n_u16 (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
8966 return __builtin_mve_vorrq_m_n_uv8hi (__a
, __imm
, __p
);
8969 __extension__
extern __inline uint8x16_t
8970 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8971 __arm_vqrshruntq_n_s16 (uint8x16_t __a
, int16x8_t __b
, const int __imm
)
8973 return __builtin_mve_vqrshruntq_n_sv8hi (__a
, __b
, __imm
);
8976 __extension__
extern __inline uint8x16_t
8977 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8978 __arm_vqshrunbq_n_s16 (uint8x16_t __a
, int16x8_t __b
, const int __imm
)
8980 return __builtin_mve_vqshrunbq_n_sv8hi (__a
, __b
, __imm
);
8983 __extension__
extern __inline uint8x16_t
8984 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8985 __arm_vqshruntq_n_s16 (uint8x16_t __a
, int16x8_t __b
, const int __imm
)
8987 return __builtin_mve_vqshruntq_n_sv8hi (__a
, __b
, __imm
);
8990 __extension__
extern __inline uint8x16_t
8991 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8992 __arm_vqmovunbq_m_s16 (uint8x16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
8994 return __builtin_mve_vqmovunbq_m_sv8hi (__a
, __b
, __p
);
8997 __extension__
extern __inline uint8x16_t
8998 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
8999 __arm_vqmovuntq_m_s16 (uint8x16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
9001 return __builtin_mve_vqmovuntq_m_sv8hi (__a
, __b
, __p
);
9004 __extension__
extern __inline uint8x16_t
9005 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9006 __arm_vqrshrntq_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
9008 return __builtin_mve_vqrshrntq_n_uv8hi (__a
, __b
, __imm
);
9011 __extension__
extern __inline uint8x16_t
9012 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9013 __arm_vqshrnbq_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
9015 return __builtin_mve_vqshrnbq_n_uv8hi (__a
, __b
, __imm
);
9018 __extension__
extern __inline uint8x16_t
9019 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9020 __arm_vqshrntq_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
9022 return __builtin_mve_vqshrntq_n_uv8hi (__a
, __b
, __imm
);
9025 __extension__
extern __inline uint8x16_t
9026 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9027 __arm_vrshrnbq_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
9029 return __builtin_mve_vrshrnbq_n_uv8hi (__a
, __b
, __imm
);
9032 __extension__
extern __inline uint8x16_t
9033 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9034 __arm_vrshrntq_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
9036 return __builtin_mve_vrshrntq_n_uv8hi (__a
, __b
, __imm
);
9039 __extension__
extern __inline uint8x16_t
9040 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9041 __arm_vshrnbq_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
9043 return __builtin_mve_vshrnbq_n_uv8hi (__a
, __b
, __imm
);
9046 __extension__
extern __inline uint8x16_t
9047 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9048 __arm_vshrntq_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
9050 return __builtin_mve_vshrntq_n_uv8hi (__a
, __b
, __imm
);
9053 __extension__
extern __inline
uint64_t
9054 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9055 __arm_vmlaldavaq_u16 (uint64_t __a
, uint16x8_t __b
, uint16x8_t __c
)
9057 return __builtin_mve_vmlaldavaq_uv8hi (__a
, __b
, __c
);
9060 __extension__
extern __inline
uint64_t
9061 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9062 __arm_vmlaldavq_p_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
9064 return __builtin_mve_vmlaldavq_p_uv8hi (__a
, __b
, __p
);
9067 __extension__
extern __inline uint16x8_t
9068 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9069 __arm_vmovlbq_m_u8 (uint16x8_t __inactive
, uint8x16_t __a
, mve_pred16_t __p
)
9071 return __builtin_mve_vmovlbq_m_uv16qi (__inactive
, __a
, __p
);
9074 __extension__
extern __inline uint16x8_t
9075 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9076 __arm_vmovltq_m_u8 (uint16x8_t __inactive
, uint8x16_t __a
, mve_pred16_t __p
)
9078 return __builtin_mve_vmovltq_m_uv16qi (__inactive
, __a
, __p
);
9081 __extension__
extern __inline uint8x16_t
9082 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9083 __arm_vmovnbq_m_u16 (uint8x16_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
9085 return __builtin_mve_vmovnbq_m_uv8hi (__a
, __b
, __p
);
9088 __extension__
extern __inline uint8x16_t
9089 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9090 __arm_vmovntq_m_u16 (uint8x16_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
9092 return __builtin_mve_vmovntq_m_uv8hi (__a
, __b
, __p
);
9095 __extension__
extern __inline uint8x16_t
9096 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9097 __arm_vqmovnbq_m_u16 (uint8x16_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
9099 return __builtin_mve_vqmovnbq_m_uv8hi (__a
, __b
, __p
);
9102 __extension__
extern __inline uint8x16_t
9103 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9104 __arm_vqmovntq_m_u16 (uint8x16_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
9106 return __builtin_mve_vqmovntq_m_uv8hi (__a
, __b
, __p
);
9109 __extension__
extern __inline uint8x16_t
9110 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9111 __arm_vrev32q_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, mve_pred16_t __p
)
9113 return __builtin_mve_vrev32q_m_uv16qi (__inactive
, __a
, __p
);
9116 __extension__
extern __inline int32x4_t
9117 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9118 __arm_vmvnq_m_n_s32 (int32x4_t __inactive
, const int __imm
, mve_pred16_t __p
)
9120 return __builtin_mve_vmvnq_m_n_sv4si (__inactive
, __imm
, __p
);
9123 __extension__
extern __inline int32x4_t
9124 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9125 __arm_vorrq_m_n_s32 (int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
9127 return __builtin_mve_vorrq_m_n_sv4si (__a
, __imm
, __p
);
9130 __extension__
extern __inline int16x8_t
9131 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9132 __arm_vqrshrntq_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
)
9134 return __builtin_mve_vqrshrntq_n_sv4si (__a
, __b
, __imm
);
9137 __extension__
extern __inline int16x8_t
9138 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9139 __arm_vqshrnbq_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
)
9141 return __builtin_mve_vqshrnbq_n_sv4si (__a
, __b
, __imm
);
9144 __extension__
extern __inline int16x8_t
9145 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9146 __arm_vqshrntq_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
)
9148 return __builtin_mve_vqshrntq_n_sv4si (__a
, __b
, __imm
);
9151 __extension__
extern __inline int16x8_t
9152 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9153 __arm_vrshrnbq_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
)
9155 return __builtin_mve_vrshrnbq_n_sv4si (__a
, __b
, __imm
);
9158 __extension__
extern __inline int16x8_t
9159 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9160 __arm_vrshrntq_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
)
9162 return __builtin_mve_vrshrntq_n_sv4si (__a
, __b
, __imm
);
9165 __extension__
extern __inline int16x8_t
9166 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9167 __arm_vshrnbq_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
)
9169 return __builtin_mve_vshrnbq_n_sv4si (__a
, __b
, __imm
);
9172 __extension__
extern __inline int16x8_t
9173 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9174 __arm_vshrntq_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
)
9176 return __builtin_mve_vshrntq_n_sv4si (__a
, __b
, __imm
);
9179 __extension__
extern __inline
int64_t
9180 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9181 __arm_vmlaldavaq_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
9183 return __builtin_mve_vmlaldavaq_sv4si (__a
, __b
, __c
);
9186 __extension__
extern __inline
int64_t
9187 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9188 __arm_vmlaldavaxq_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
9190 return __builtin_mve_vmlaldavaxq_sv4si (__a
, __b
, __c
);
9193 __extension__
extern __inline
int64_t
9194 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9195 __arm_vmlsldavaq_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
9197 return __builtin_mve_vmlsldavaq_sv4si (__a
, __b
, __c
);
9200 __extension__
extern __inline
int64_t
9201 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9202 __arm_vmlsldavaxq_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
9204 return __builtin_mve_vmlsldavaxq_sv4si (__a
, __b
, __c
);
9207 __extension__
extern __inline
int64_t
9208 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9209 __arm_vmlaldavq_p_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9211 return __builtin_mve_vmlaldavq_p_sv4si (__a
, __b
, __p
);
9214 __extension__
extern __inline
int64_t
9215 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9216 __arm_vmlaldavxq_p_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9218 return __builtin_mve_vmlaldavxq_p_sv4si (__a
, __b
, __p
);
9221 __extension__
extern __inline
int64_t
9222 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9223 __arm_vmlsldavq_p_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9225 return __builtin_mve_vmlsldavq_p_sv4si (__a
, __b
, __p
);
9228 __extension__
extern __inline
int64_t
9229 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9230 __arm_vmlsldavxq_p_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9232 return __builtin_mve_vmlsldavxq_p_sv4si (__a
, __b
, __p
);
9235 __extension__
extern __inline int32x4_t
9236 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9237 __arm_vmovlbq_m_s16 (int32x4_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
9239 return __builtin_mve_vmovlbq_m_sv8hi (__inactive
, __a
, __p
);
9242 __extension__
extern __inline int32x4_t
9243 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9244 __arm_vmovltq_m_s16 (int32x4_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
9246 return __builtin_mve_vmovltq_m_sv8hi (__inactive
, __a
, __p
);
9249 __extension__
extern __inline int16x8_t
9250 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9251 __arm_vmovnbq_m_s32 (int16x8_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9253 return __builtin_mve_vmovnbq_m_sv4si (__a
, __b
, __p
);
9256 __extension__
extern __inline int16x8_t
9257 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9258 __arm_vmovntq_m_s32 (int16x8_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9260 return __builtin_mve_vmovntq_m_sv4si (__a
, __b
, __p
);
9263 __extension__
extern __inline int16x8_t
9264 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9265 __arm_vqmovnbq_m_s32 (int16x8_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9267 return __builtin_mve_vqmovnbq_m_sv4si (__a
, __b
, __p
);
9270 __extension__
extern __inline int16x8_t
9271 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9272 __arm_vqmovntq_m_s32 (int16x8_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9274 return __builtin_mve_vqmovntq_m_sv4si (__a
, __b
, __p
);
9277 __extension__
extern __inline int16x8_t
9278 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9279 __arm_vrev32q_m_s16 (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
9281 return __builtin_mve_vrev32q_m_sv8hi (__inactive
, __a
, __p
);
9284 __extension__
extern __inline uint32x4_t
9285 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9286 __arm_vmvnq_m_n_u32 (uint32x4_t __inactive
, const int __imm
, mve_pred16_t __p
)
9288 return __builtin_mve_vmvnq_m_n_uv4si (__inactive
, __imm
, __p
);
9291 __extension__
extern __inline uint32x4_t
9292 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9293 __arm_vorrq_m_n_u32 (uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
9295 return __builtin_mve_vorrq_m_n_uv4si (__a
, __imm
, __p
);
9298 __extension__
extern __inline uint16x8_t
9299 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9300 __arm_vqrshruntq_n_s32 (uint16x8_t __a
, int32x4_t __b
, const int __imm
)
9302 return __builtin_mve_vqrshruntq_n_sv4si (__a
, __b
, __imm
);
9305 __extension__
extern __inline uint16x8_t
9306 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9307 __arm_vqshrunbq_n_s32 (uint16x8_t __a
, int32x4_t __b
, const int __imm
)
9309 return __builtin_mve_vqshrunbq_n_sv4si (__a
, __b
, __imm
);
9312 __extension__
extern __inline uint16x8_t
9313 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9314 __arm_vqshruntq_n_s32 (uint16x8_t __a
, int32x4_t __b
, const int __imm
)
9316 return __builtin_mve_vqshruntq_n_sv4si (__a
, __b
, __imm
);
9319 __extension__
extern __inline uint16x8_t
9320 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9321 __arm_vqmovunbq_m_s32 (uint16x8_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9323 return __builtin_mve_vqmovunbq_m_sv4si (__a
, __b
, __p
);
9326 __extension__
extern __inline uint16x8_t
9327 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9328 __arm_vqmovuntq_m_s32 (uint16x8_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9330 return __builtin_mve_vqmovuntq_m_sv4si (__a
, __b
, __p
);
9333 __extension__
extern __inline uint16x8_t
9334 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9335 __arm_vqrshrntq_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
9337 return __builtin_mve_vqrshrntq_n_uv4si (__a
, __b
, __imm
);
9340 __extension__
extern __inline uint16x8_t
9341 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9342 __arm_vqshrnbq_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
9344 return __builtin_mve_vqshrnbq_n_uv4si (__a
, __b
, __imm
);
9347 __extension__
extern __inline uint16x8_t
9348 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9349 __arm_vqshrntq_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
9351 return __builtin_mve_vqshrntq_n_uv4si (__a
, __b
, __imm
);
9354 __extension__
extern __inline uint16x8_t
9355 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9356 __arm_vrshrnbq_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
9358 return __builtin_mve_vrshrnbq_n_uv4si (__a
, __b
, __imm
);
9361 __extension__
extern __inline uint16x8_t
9362 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9363 __arm_vrshrntq_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
9365 return __builtin_mve_vrshrntq_n_uv4si (__a
, __b
, __imm
);
9368 __extension__
extern __inline uint16x8_t
9369 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9370 __arm_vshrnbq_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
9372 return __builtin_mve_vshrnbq_n_uv4si (__a
, __b
, __imm
);
9375 __extension__
extern __inline uint16x8_t
9376 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9377 __arm_vshrntq_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
9379 return __builtin_mve_vshrntq_n_uv4si (__a
, __b
, __imm
);
9382 __extension__
extern __inline
uint64_t
9383 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9384 __arm_vmlaldavaq_u32 (uint64_t __a
, uint32x4_t __b
, uint32x4_t __c
)
9386 return __builtin_mve_vmlaldavaq_uv4si (__a
, __b
, __c
);
9389 __extension__
extern __inline
uint64_t
9390 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9391 __arm_vmlaldavq_p_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
9393 return __builtin_mve_vmlaldavq_p_uv4si (__a
, __b
, __p
);
9396 __extension__
extern __inline uint32x4_t
9397 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9398 __arm_vmovlbq_m_u16 (uint32x4_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
9400 return __builtin_mve_vmovlbq_m_uv8hi (__inactive
, __a
, __p
);
9403 __extension__
extern __inline uint32x4_t
9404 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9405 __arm_vmovltq_m_u16 (uint32x4_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
9407 return __builtin_mve_vmovltq_m_uv8hi (__inactive
, __a
, __p
);
9410 __extension__
extern __inline uint16x8_t
9411 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9412 __arm_vmovnbq_m_u32 (uint16x8_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
9414 return __builtin_mve_vmovnbq_m_uv4si (__a
, __b
, __p
);
9417 __extension__
extern __inline uint16x8_t
9418 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9419 __arm_vmovntq_m_u32 (uint16x8_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
9421 return __builtin_mve_vmovntq_m_uv4si (__a
, __b
, __p
);
9424 __extension__
extern __inline uint16x8_t
9425 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9426 __arm_vqmovnbq_m_u32 (uint16x8_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
9428 return __builtin_mve_vqmovnbq_m_uv4si (__a
, __b
, __p
);
9431 __extension__
extern __inline uint16x8_t
9432 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9433 __arm_vqmovntq_m_u32 (uint16x8_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
9435 return __builtin_mve_vqmovntq_m_uv4si (__a
, __b
, __p
);
9438 __extension__
extern __inline uint16x8_t
9439 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9440 __arm_vrev32q_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
9442 return __builtin_mve_vrev32q_m_uv8hi (__inactive
, __a
, __p
);
9445 __extension__
extern __inline int8x16_t
9446 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9447 __arm_vsriq_m_n_s8 (int8x16_t __a
, int8x16_t __b
, const int __imm
, mve_pred16_t __p
)
9449 return __builtin_mve_vsriq_m_n_sv16qi (__a
, __b
, __imm
, __p
);
9452 __extension__
extern __inline int8x16_t
9453 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9454 __arm_vsubq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
9456 return __builtin_mve_vsubq_m_sv16qi (__inactive
, __a
, __b
, __p
);
9459 __extension__
extern __inline uint8x16_t
9460 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9461 __arm_vqshluq_m_n_s8 (uint8x16_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
9463 return __builtin_mve_vqshluq_m_n_sv16qi (__inactive
, __a
, __imm
, __p
);
9466 __extension__
extern __inline
uint32_t
9467 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9468 __arm_vabavq_p_s8 (uint32_t __a
, int8x16_t __b
, int8x16_t __c
, mve_pred16_t __p
)
9470 return __builtin_mve_vabavq_p_sv16qi (__a
, __b
, __c
, __p
);
9473 __extension__
extern __inline uint8x16_t
9474 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9475 __arm_vsriq_m_n_u8 (uint8x16_t __a
, uint8x16_t __b
, const int __imm
, mve_pred16_t __p
)
9477 return __builtin_mve_vsriq_m_n_uv16qi (__a
, __b
, __imm
, __p
);
9480 __extension__
extern __inline uint8x16_t
9481 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9482 __arm_vshlq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
9484 return __builtin_mve_vshlq_m_uv16qi (__inactive
, __a
, __b
, __p
);
9487 __extension__
extern __inline uint8x16_t
9488 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9489 __arm_vsubq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
9491 return __builtin_mve_vsubq_m_uv16qi (__inactive
, __a
, __b
, __p
);
9494 __extension__
extern __inline
uint32_t
9495 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9496 __arm_vabavq_p_u8 (uint32_t __a
, uint8x16_t __b
, uint8x16_t __c
, mve_pred16_t __p
)
9498 return __builtin_mve_vabavq_p_uv16qi (__a
, __b
, __c
, __p
);
9501 __extension__
extern __inline int8x16_t
9502 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9503 __arm_vshlq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
9505 return __builtin_mve_vshlq_m_sv16qi (__inactive
, __a
, __b
, __p
);
9508 __extension__
extern __inline int16x8_t
9509 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9510 __arm_vsriq_m_n_s16 (int16x8_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
9512 return __builtin_mve_vsriq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
9515 __extension__
extern __inline int16x8_t
9516 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9517 __arm_vsubq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
9519 return __builtin_mve_vsubq_m_sv8hi (__inactive
, __a
, __b
, __p
);
9522 __extension__
extern __inline uint16x8_t
9523 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9524 __arm_vqshluq_m_n_s16 (uint16x8_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
9526 return __builtin_mve_vqshluq_m_n_sv8hi (__inactive
, __a
, __imm
, __p
);
9529 __extension__
extern __inline
uint32_t
9530 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9531 __arm_vabavq_p_s16 (uint32_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
9533 return __builtin_mve_vabavq_p_sv8hi (__a
, __b
, __c
, __p
);
9536 __extension__
extern __inline uint16x8_t
9537 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9538 __arm_vsriq_m_n_u16 (uint16x8_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
9540 return __builtin_mve_vsriq_m_n_uv8hi (__a
, __b
, __imm
, __p
);
9543 __extension__
extern __inline uint16x8_t
9544 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9545 __arm_vshlq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
9547 return __builtin_mve_vshlq_m_uv8hi (__inactive
, __a
, __b
, __p
);
9550 __extension__
extern __inline uint16x8_t
9551 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9552 __arm_vsubq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
9554 return __builtin_mve_vsubq_m_uv8hi (__inactive
, __a
, __b
, __p
);
9557 __extension__
extern __inline
uint32_t
9558 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9559 __arm_vabavq_p_u16 (uint32_t __a
, uint16x8_t __b
, uint16x8_t __c
, mve_pred16_t __p
)
9561 return __builtin_mve_vabavq_p_uv8hi (__a
, __b
, __c
, __p
);
9564 __extension__
extern __inline int16x8_t
9565 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9566 __arm_vshlq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
9568 return __builtin_mve_vshlq_m_sv8hi (__inactive
, __a
, __b
, __p
);
9571 __extension__
extern __inline int32x4_t
9572 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9573 __arm_vsriq_m_n_s32 (int32x4_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
9575 return __builtin_mve_vsriq_m_n_sv4si (__a
, __b
, __imm
, __p
);
9578 __extension__
extern __inline int32x4_t
9579 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9580 __arm_vsubq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9582 return __builtin_mve_vsubq_m_sv4si (__inactive
, __a
, __b
, __p
);
9585 __extension__
extern __inline uint32x4_t
9586 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9587 __arm_vqshluq_m_n_s32 (uint32x4_t __inactive
, int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
9589 return __builtin_mve_vqshluq_m_n_sv4si (__inactive
, __a
, __imm
, __p
);
9592 __extension__
extern __inline
uint32_t
9593 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9594 __arm_vabavq_p_s32 (uint32_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
9596 return __builtin_mve_vabavq_p_sv4si (__a
, __b
, __c
, __p
);
9599 __extension__
extern __inline uint32x4_t
9600 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9601 __arm_vsriq_m_n_u32 (uint32x4_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
9603 return __builtin_mve_vsriq_m_n_uv4si (__a
, __b
, __imm
, __p
);
9606 __extension__
extern __inline uint32x4_t
9607 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9608 __arm_vshlq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9610 return __builtin_mve_vshlq_m_uv4si (__inactive
, __a
, __b
, __p
);
9613 __extension__
extern __inline uint32x4_t
9614 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9615 __arm_vsubq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
9617 return __builtin_mve_vsubq_m_uv4si (__inactive
, __a
, __b
, __p
);
9620 __extension__
extern __inline
uint32_t
9621 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9622 __arm_vabavq_p_u32 (uint32_t __a
, uint32x4_t __b
, uint32x4_t __c
, mve_pred16_t __p
)
9624 return __builtin_mve_vabavq_p_uv4si (__a
, __b
, __c
, __p
);
9627 __extension__
extern __inline int32x4_t
9628 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9629 __arm_vshlq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9631 return __builtin_mve_vshlq_m_sv4si (__inactive
, __a
, __b
, __p
);
9634 __extension__
extern __inline int8x16_t
9635 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9636 __arm_vabdq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
9638 return __builtin_mve_vabdq_m_sv16qi (__inactive
, __a
, __b
, __p
);
9641 __extension__
extern __inline int32x4_t
9642 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9643 __arm_vabdq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9645 return __builtin_mve_vabdq_m_sv4si (__inactive
, __a
, __b
, __p
);
9648 __extension__
extern __inline int16x8_t
9649 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9650 __arm_vabdq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
9652 return __builtin_mve_vabdq_m_sv8hi (__inactive
, __a
, __b
, __p
);
9655 __extension__
extern __inline uint8x16_t
9656 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9657 __arm_vabdq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
9659 return __builtin_mve_vabdq_m_uv16qi (__inactive
, __a
, __b
, __p
);
9662 __extension__
extern __inline uint32x4_t
9663 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9664 __arm_vabdq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
9666 return __builtin_mve_vabdq_m_uv4si (__inactive
, __a
, __b
, __p
);
9669 __extension__
extern __inline uint16x8_t
9670 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9671 __arm_vabdq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
9673 return __builtin_mve_vabdq_m_uv8hi (__inactive
, __a
, __b
, __p
);
9676 __extension__
extern __inline int8x16_t
9677 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9678 __arm_vaddq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
9680 return __builtin_mve_vaddq_m_n_sv16qi (__inactive
, __a
, __b
, __p
);
9683 __extension__
extern __inline int32x4_t
9684 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9685 __arm_vaddq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
9687 return __builtin_mve_vaddq_m_n_sv4si (__inactive
, __a
, __b
, __p
);
9690 __extension__
extern __inline int16x8_t
9691 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9692 __arm_vaddq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
9694 return __builtin_mve_vaddq_m_n_sv8hi (__inactive
, __a
, __b
, __p
);
9697 __extension__
extern __inline uint8x16_t
9698 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9699 __arm_vaddq_m_n_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
9701 return __builtin_mve_vaddq_m_n_uv16qi (__inactive
, __a
, __b
, __p
);
9704 __extension__
extern __inline uint32x4_t
9705 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9706 __arm_vaddq_m_n_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
9708 return __builtin_mve_vaddq_m_n_uv4si (__inactive
, __a
, __b
, __p
);
9711 __extension__
extern __inline uint16x8_t
9712 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9713 __arm_vaddq_m_n_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
9715 return __builtin_mve_vaddq_m_n_uv8hi (__inactive
, __a
, __b
, __p
);
9718 __extension__
extern __inline int8x16_t
9719 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9720 __arm_vaddq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
9722 return __builtin_mve_vaddq_m_sv16qi (__inactive
, __a
, __b
, __p
);
9725 __extension__
extern __inline int32x4_t
9726 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9727 __arm_vaddq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9729 return __builtin_mve_vaddq_m_sv4si (__inactive
, __a
, __b
, __p
);
9732 __extension__
extern __inline int16x8_t
9733 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9734 __arm_vaddq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
9736 return __builtin_mve_vaddq_m_sv8hi (__inactive
, __a
, __b
, __p
);
9739 __extension__
extern __inline uint8x16_t
9740 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9741 __arm_vaddq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
9743 return __builtin_mve_vaddq_m_uv16qi (__inactive
, __a
, __b
, __p
);
9746 __extension__
extern __inline uint32x4_t
9747 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9748 __arm_vaddq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
9750 return __builtin_mve_vaddq_m_uv4si (__inactive
, __a
, __b
, __p
);
9753 __extension__
extern __inline uint16x8_t
9754 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9755 __arm_vaddq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
9757 return __builtin_mve_vaddq_m_uv8hi (__inactive
, __a
, __b
, __p
);
9760 __extension__
extern __inline int8x16_t
9761 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9762 __arm_vandq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
9764 return __builtin_mve_vandq_m_sv16qi (__inactive
, __a
, __b
, __p
);
9767 __extension__
extern __inline int32x4_t
9768 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9769 __arm_vandq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9771 return __builtin_mve_vandq_m_sv4si (__inactive
, __a
, __b
, __p
);
9774 __extension__
extern __inline int16x8_t
9775 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9776 __arm_vandq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
9778 return __builtin_mve_vandq_m_sv8hi (__inactive
, __a
, __b
, __p
);
9781 __extension__
extern __inline uint8x16_t
9782 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9783 __arm_vandq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
9785 return __builtin_mve_vandq_m_uv16qi (__inactive
, __a
, __b
, __p
);
9788 __extension__
extern __inline uint32x4_t
9789 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9790 __arm_vandq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
9792 return __builtin_mve_vandq_m_uv4si (__inactive
, __a
, __b
, __p
);
9795 __extension__
extern __inline uint16x8_t
9796 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9797 __arm_vandq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
9799 return __builtin_mve_vandq_m_uv8hi (__inactive
, __a
, __b
, __p
);
9802 __extension__
extern __inline int8x16_t
9803 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9804 __arm_vbicq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
9806 return __builtin_mve_vbicq_m_sv16qi (__inactive
, __a
, __b
, __p
);
9809 __extension__
extern __inline int32x4_t
9810 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9811 __arm_vbicq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9813 return __builtin_mve_vbicq_m_sv4si (__inactive
, __a
, __b
, __p
);
9816 __extension__
extern __inline int16x8_t
9817 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9818 __arm_vbicq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
9820 return __builtin_mve_vbicq_m_sv8hi (__inactive
, __a
, __b
, __p
);
9823 __extension__
extern __inline uint8x16_t
9824 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9825 __arm_vbicq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
9827 return __builtin_mve_vbicq_m_uv16qi (__inactive
, __a
, __b
, __p
);
9830 __extension__
extern __inline uint32x4_t
9831 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9832 __arm_vbicq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
9834 return __builtin_mve_vbicq_m_uv4si (__inactive
, __a
, __b
, __p
);
9837 __extension__
extern __inline uint16x8_t
9838 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9839 __arm_vbicq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
9841 return __builtin_mve_vbicq_m_uv8hi (__inactive
, __a
, __b
, __p
);
9844 __extension__
extern __inline int8x16_t
9845 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9846 __arm_vbrsrq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
9848 return __builtin_mve_vbrsrq_m_n_sv16qi (__inactive
, __a
, __b
, __p
);
9851 __extension__
extern __inline int32x4_t
9852 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9853 __arm_vbrsrq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
9855 return __builtin_mve_vbrsrq_m_n_sv4si (__inactive
, __a
, __b
, __p
);
9858 __extension__
extern __inline int16x8_t
9859 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9860 __arm_vbrsrq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
9862 return __builtin_mve_vbrsrq_m_n_sv8hi (__inactive
, __a
, __b
, __p
);
9865 __extension__
extern __inline uint8x16_t
9866 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9867 __arm_vbrsrq_m_n_u8 (uint8x16_t __inactive
, uint8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
9869 return __builtin_mve_vbrsrq_m_n_uv16qi (__inactive
, __a
, __b
, __p
);
9872 __extension__
extern __inline uint32x4_t
9873 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9874 __arm_vbrsrq_m_n_u32 (uint32x4_t __inactive
, uint32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
9876 return __builtin_mve_vbrsrq_m_n_uv4si (__inactive
, __a
, __b
, __p
);
9879 __extension__
extern __inline uint16x8_t
9880 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9881 __arm_vbrsrq_m_n_u16 (uint16x8_t __inactive
, uint16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
9883 return __builtin_mve_vbrsrq_m_n_uv8hi (__inactive
, __a
, __b
, __p
);
9886 __extension__
extern __inline int8x16_t
9887 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9888 __arm_vcaddq_rot270_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
9890 return __builtin_mve_vcaddq_rot270_m_sv16qi (__inactive
, __a
, __b
, __p
);
9893 __extension__
extern __inline int32x4_t
9894 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9895 __arm_vcaddq_rot270_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9897 return __builtin_mve_vcaddq_rot270_m_sv4si (__inactive
, __a
, __b
, __p
);
9900 __extension__
extern __inline int16x8_t
9901 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9902 __arm_vcaddq_rot270_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
9904 return __builtin_mve_vcaddq_rot270_m_sv8hi (__inactive
, __a
, __b
, __p
);
9907 __extension__
extern __inline uint8x16_t
9908 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9909 __arm_vcaddq_rot270_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
9911 return __builtin_mve_vcaddq_rot270_m_uv16qi (__inactive
, __a
, __b
, __p
);
9914 __extension__
extern __inline uint32x4_t
9915 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9916 __arm_vcaddq_rot270_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
9918 return __builtin_mve_vcaddq_rot270_m_uv4si (__inactive
, __a
, __b
, __p
);
9921 __extension__
extern __inline uint16x8_t
9922 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9923 __arm_vcaddq_rot270_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
9925 return __builtin_mve_vcaddq_rot270_m_uv8hi (__inactive
, __a
, __b
, __p
);
9928 __extension__
extern __inline int8x16_t
9929 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9930 __arm_vcaddq_rot90_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
9932 return __builtin_mve_vcaddq_rot90_m_sv16qi (__inactive
, __a
, __b
, __p
);
9935 __extension__
extern __inline int32x4_t
9936 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9937 __arm_vcaddq_rot90_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9939 return __builtin_mve_vcaddq_rot90_m_sv4si (__inactive
, __a
, __b
, __p
);
9942 __extension__
extern __inline int16x8_t
9943 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9944 __arm_vcaddq_rot90_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
9946 return __builtin_mve_vcaddq_rot90_m_sv8hi (__inactive
, __a
, __b
, __p
);
9949 __extension__
extern __inline uint8x16_t
9950 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9951 __arm_vcaddq_rot90_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
9953 return __builtin_mve_vcaddq_rot90_m_uv16qi (__inactive
, __a
, __b
, __p
);
9956 __extension__
extern __inline uint32x4_t
9957 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9958 __arm_vcaddq_rot90_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
9960 return __builtin_mve_vcaddq_rot90_m_uv4si (__inactive
, __a
, __b
, __p
);
9963 __extension__
extern __inline uint16x8_t
9964 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9965 __arm_vcaddq_rot90_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
9967 return __builtin_mve_vcaddq_rot90_m_uv8hi (__inactive
, __a
, __b
, __p
);
9970 __extension__
extern __inline int8x16_t
9971 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9972 __arm_veorq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
9974 return __builtin_mve_veorq_m_sv16qi (__inactive
, __a
, __b
, __p
);
9977 __extension__
extern __inline int32x4_t
9978 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9979 __arm_veorq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
9981 return __builtin_mve_veorq_m_sv4si (__inactive
, __a
, __b
, __p
);
9984 __extension__
extern __inline int16x8_t
9985 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9986 __arm_veorq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
9988 return __builtin_mve_veorq_m_sv8hi (__inactive
, __a
, __b
, __p
);
9991 __extension__
extern __inline uint8x16_t
9992 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
9993 __arm_veorq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
9995 return __builtin_mve_veorq_m_uv16qi (__inactive
, __a
, __b
, __p
);
9998 __extension__
extern __inline uint32x4_t
9999 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10000 __arm_veorq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
10002 return __builtin_mve_veorq_m_uv4si (__inactive
, __a
, __b
, __p
);
10005 __extension__
extern __inline uint16x8_t
10006 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10007 __arm_veorq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
10009 return __builtin_mve_veorq_m_uv8hi (__inactive
, __a
, __b
, __p
);
10012 __extension__
extern __inline int8x16_t
10013 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10014 __arm_vhaddq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
10016 return __builtin_mve_vhaddq_m_n_sv16qi (__inactive
, __a
, __b
, __p
);
10019 __extension__
extern __inline int32x4_t
10020 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10021 __arm_vhaddq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
10023 return __builtin_mve_vhaddq_m_n_sv4si (__inactive
, __a
, __b
, __p
);
10026 __extension__
extern __inline int16x8_t
10027 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10028 __arm_vhaddq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
10030 return __builtin_mve_vhaddq_m_n_sv8hi (__inactive
, __a
, __b
, __p
);
10033 __extension__
extern __inline uint8x16_t
10034 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10035 __arm_vhaddq_m_n_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
10037 return __builtin_mve_vhaddq_m_n_uv16qi (__inactive
, __a
, __b
, __p
);
10040 __extension__
extern __inline uint32x4_t
10041 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10042 __arm_vhaddq_m_n_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
10044 return __builtin_mve_vhaddq_m_n_uv4si (__inactive
, __a
, __b
, __p
);
10047 __extension__
extern __inline uint16x8_t
10048 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10049 __arm_vhaddq_m_n_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
10051 return __builtin_mve_vhaddq_m_n_uv8hi (__inactive
, __a
, __b
, __p
);
10054 __extension__
extern __inline int8x16_t
10055 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10056 __arm_vhaddq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10058 return __builtin_mve_vhaddq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10061 __extension__
extern __inline int32x4_t
10062 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10063 __arm_vhaddq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10065 return __builtin_mve_vhaddq_m_sv4si (__inactive
, __a
, __b
, __p
);
10068 __extension__
extern __inline int16x8_t
10069 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10070 __arm_vhaddq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10072 return __builtin_mve_vhaddq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10075 __extension__
extern __inline uint8x16_t
10076 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10077 __arm_vhaddq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
10079 return __builtin_mve_vhaddq_m_uv16qi (__inactive
, __a
, __b
, __p
);
10082 __extension__
extern __inline uint32x4_t
10083 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10084 __arm_vhaddq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
10086 return __builtin_mve_vhaddq_m_uv4si (__inactive
, __a
, __b
, __p
);
10089 __extension__
extern __inline uint16x8_t
10090 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10091 __arm_vhaddq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
10093 return __builtin_mve_vhaddq_m_uv8hi (__inactive
, __a
, __b
, __p
);
10096 __extension__
extern __inline int8x16_t
10097 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10098 __arm_vhcaddq_rot270_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10100 return __builtin_mve_vhcaddq_rot270_m_sv16qi (__inactive
, __a
, __b
, __p
);
10103 __extension__
extern __inline int32x4_t
10104 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10105 __arm_vhcaddq_rot270_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10107 return __builtin_mve_vhcaddq_rot270_m_sv4si (__inactive
, __a
, __b
, __p
);
10110 __extension__
extern __inline int16x8_t
10111 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10112 __arm_vhcaddq_rot270_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10114 return __builtin_mve_vhcaddq_rot270_m_sv8hi (__inactive
, __a
, __b
, __p
);
10117 __extension__
extern __inline int8x16_t
10118 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10119 __arm_vhcaddq_rot90_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10121 return __builtin_mve_vhcaddq_rot90_m_sv16qi (__inactive
, __a
, __b
, __p
);
10124 __extension__
extern __inline int32x4_t
10125 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10126 __arm_vhcaddq_rot90_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10128 return __builtin_mve_vhcaddq_rot90_m_sv4si (__inactive
, __a
, __b
, __p
);
10131 __extension__
extern __inline int16x8_t
10132 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10133 __arm_vhcaddq_rot90_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10135 return __builtin_mve_vhcaddq_rot90_m_sv8hi (__inactive
, __a
, __b
, __p
);
10138 __extension__
extern __inline int8x16_t
10139 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10140 __arm_vhsubq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
10142 return __builtin_mve_vhsubq_m_n_sv16qi (__inactive
, __a
, __b
, __p
);
10145 __extension__
extern __inline int32x4_t
10146 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10147 __arm_vhsubq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
10149 return __builtin_mve_vhsubq_m_n_sv4si (__inactive
, __a
, __b
, __p
);
10152 __extension__
extern __inline int16x8_t
10153 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10154 __arm_vhsubq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
10156 return __builtin_mve_vhsubq_m_n_sv8hi (__inactive
, __a
, __b
, __p
);
10159 __extension__
extern __inline uint8x16_t
10160 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10161 __arm_vhsubq_m_n_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
10163 return __builtin_mve_vhsubq_m_n_uv16qi (__inactive
, __a
, __b
, __p
);
10166 __extension__
extern __inline uint32x4_t
10167 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10168 __arm_vhsubq_m_n_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
10170 return __builtin_mve_vhsubq_m_n_uv4si (__inactive
, __a
, __b
, __p
);
10173 __extension__
extern __inline uint16x8_t
10174 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10175 __arm_vhsubq_m_n_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
10177 return __builtin_mve_vhsubq_m_n_uv8hi (__inactive
, __a
, __b
, __p
);
10180 __extension__
extern __inline int8x16_t
10181 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10182 __arm_vhsubq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10184 return __builtin_mve_vhsubq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10187 __extension__
extern __inline int32x4_t
10188 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10189 __arm_vhsubq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10191 return __builtin_mve_vhsubq_m_sv4si (__inactive
, __a
, __b
, __p
);
10194 __extension__
extern __inline int16x8_t
10195 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10196 __arm_vhsubq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10198 return __builtin_mve_vhsubq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10201 __extension__
extern __inline uint8x16_t
10202 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10203 __arm_vhsubq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
10205 return __builtin_mve_vhsubq_m_uv16qi (__inactive
, __a
, __b
, __p
);
10208 __extension__
extern __inline uint32x4_t
10209 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10210 __arm_vhsubq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
10212 return __builtin_mve_vhsubq_m_uv4si (__inactive
, __a
, __b
, __p
);
10215 __extension__
extern __inline uint16x8_t
10216 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10217 __arm_vhsubq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
10219 return __builtin_mve_vhsubq_m_uv8hi (__inactive
, __a
, __b
, __p
);
10222 __extension__
extern __inline int8x16_t
10223 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10224 __arm_vmaxq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10226 return __builtin_mve_vmaxq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10229 __extension__
extern __inline int32x4_t
10230 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10231 __arm_vmaxq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10233 return __builtin_mve_vmaxq_m_sv4si (__inactive
, __a
, __b
, __p
);
10236 __extension__
extern __inline int16x8_t
10237 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10238 __arm_vmaxq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10240 return __builtin_mve_vmaxq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10243 __extension__
extern __inline uint8x16_t
10244 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10245 __arm_vmaxq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
10247 return __builtin_mve_vmaxq_m_uv16qi (__inactive
, __a
, __b
, __p
);
10250 __extension__
extern __inline uint32x4_t
10251 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10252 __arm_vmaxq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
10254 return __builtin_mve_vmaxq_m_uv4si (__inactive
, __a
, __b
, __p
);
10257 __extension__
extern __inline uint16x8_t
10258 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10259 __arm_vmaxq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
10261 return __builtin_mve_vmaxq_m_uv8hi (__inactive
, __a
, __b
, __p
);
10264 __extension__
extern __inline int8x16_t
10265 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10266 __arm_vminq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10268 return __builtin_mve_vminq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10271 __extension__
extern __inline int32x4_t
10272 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10273 __arm_vminq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10275 return __builtin_mve_vminq_m_sv4si (__inactive
, __a
, __b
, __p
);
10278 __extension__
extern __inline int16x8_t
10279 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10280 __arm_vminq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10282 return __builtin_mve_vminq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10285 __extension__
extern __inline uint8x16_t
10286 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10287 __arm_vminq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
10289 return __builtin_mve_vminq_m_uv16qi (__inactive
, __a
, __b
, __p
);
10292 __extension__
extern __inline uint32x4_t
10293 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10294 __arm_vminq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
10296 return __builtin_mve_vminq_m_uv4si (__inactive
, __a
, __b
, __p
);
10299 __extension__
extern __inline uint16x8_t
10300 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10301 __arm_vminq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
10303 return __builtin_mve_vminq_m_uv8hi (__inactive
, __a
, __b
, __p
);
10306 __extension__
extern __inline
int32_t
10307 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10308 __arm_vmladavaq_p_s8 (int32_t __a
, int8x16_t __b
, int8x16_t __c
, mve_pred16_t __p
)
10310 return __builtin_mve_vmladavaq_p_sv16qi (__a
, __b
, __c
, __p
);
10313 __extension__
extern __inline
int32_t
10314 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10315 __arm_vmladavaq_p_s32 (int32_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
10317 return __builtin_mve_vmladavaq_p_sv4si (__a
, __b
, __c
, __p
);
10320 __extension__
extern __inline
int32_t
10321 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10322 __arm_vmladavaq_p_s16 (int32_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
10324 return __builtin_mve_vmladavaq_p_sv8hi (__a
, __b
, __c
, __p
);
10327 __extension__
extern __inline
uint32_t
10328 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10329 __arm_vmladavaq_p_u8 (uint32_t __a
, uint8x16_t __b
, uint8x16_t __c
, mve_pred16_t __p
)
10331 return __builtin_mve_vmladavaq_p_uv16qi (__a
, __b
, __c
, __p
);
10334 __extension__
extern __inline
uint32_t
10335 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10336 __arm_vmladavaq_p_u32 (uint32_t __a
, uint32x4_t __b
, uint32x4_t __c
, mve_pred16_t __p
)
10338 return __builtin_mve_vmladavaq_p_uv4si (__a
, __b
, __c
, __p
);
10341 __extension__
extern __inline
uint32_t
10342 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10343 __arm_vmladavaq_p_u16 (uint32_t __a
, uint16x8_t __b
, uint16x8_t __c
, mve_pred16_t __p
)
10345 return __builtin_mve_vmladavaq_p_uv8hi (__a
, __b
, __c
, __p
);
10348 __extension__
extern __inline
int32_t
10349 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10350 __arm_vmladavaxq_p_s8 (int32_t __a
, int8x16_t __b
, int8x16_t __c
, mve_pred16_t __p
)
10352 return __builtin_mve_vmladavaxq_p_sv16qi (__a
, __b
, __c
, __p
);
10355 __extension__
extern __inline
int32_t
10356 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10357 __arm_vmladavaxq_p_s32 (int32_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
10359 return __builtin_mve_vmladavaxq_p_sv4si (__a
, __b
, __c
, __p
);
10362 __extension__
extern __inline
int32_t
10363 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10364 __arm_vmladavaxq_p_s16 (int32_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
10366 return __builtin_mve_vmladavaxq_p_sv8hi (__a
, __b
, __c
, __p
);
10369 __extension__
extern __inline int8x16_t
10370 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10371 __arm_vmlaq_m_n_s8 (int8x16_t __a
, int8x16_t __b
, int8_t __c
, mve_pred16_t __p
)
10373 return __builtin_mve_vmlaq_m_n_sv16qi (__a
, __b
, __c
, __p
);
10376 __extension__
extern __inline int32x4_t
10377 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10378 __arm_vmlaq_m_n_s32 (int32x4_t __a
, int32x4_t __b
, int32_t __c
, mve_pred16_t __p
)
10380 return __builtin_mve_vmlaq_m_n_sv4si (__a
, __b
, __c
, __p
);
10383 __extension__
extern __inline int16x8_t
10384 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10385 __arm_vmlaq_m_n_s16 (int16x8_t __a
, int16x8_t __b
, int16_t __c
, mve_pred16_t __p
)
10387 return __builtin_mve_vmlaq_m_n_sv8hi (__a
, __b
, __c
, __p
);
10390 __extension__
extern __inline uint8x16_t
10391 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10392 __arm_vmlaq_m_n_u8 (uint8x16_t __a
, uint8x16_t __b
, uint8_t __c
, mve_pred16_t __p
)
10394 return __builtin_mve_vmlaq_m_n_uv16qi (__a
, __b
, __c
, __p
);
10397 __extension__
extern __inline uint32x4_t
10398 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10399 __arm_vmlaq_m_n_u32 (uint32x4_t __a
, uint32x4_t __b
, uint32_t __c
, mve_pred16_t __p
)
10401 return __builtin_mve_vmlaq_m_n_uv4si (__a
, __b
, __c
, __p
);
10404 __extension__
extern __inline uint16x8_t
10405 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10406 __arm_vmlaq_m_n_u16 (uint16x8_t __a
, uint16x8_t __b
, uint16_t __c
, mve_pred16_t __p
)
10408 return __builtin_mve_vmlaq_m_n_uv8hi (__a
, __b
, __c
, __p
);
10411 __extension__
extern __inline int8x16_t
10412 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10413 __arm_vmlasq_m_n_s8 (int8x16_t __a
, int8x16_t __b
, int8_t __c
, mve_pred16_t __p
)
10415 return __builtin_mve_vmlasq_m_n_sv16qi (__a
, __b
, __c
, __p
);
10418 __extension__
extern __inline int32x4_t
10419 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10420 __arm_vmlasq_m_n_s32 (int32x4_t __a
, int32x4_t __b
, int32_t __c
, mve_pred16_t __p
)
10422 return __builtin_mve_vmlasq_m_n_sv4si (__a
, __b
, __c
, __p
);
10425 __extension__
extern __inline int16x8_t
10426 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10427 __arm_vmlasq_m_n_s16 (int16x8_t __a
, int16x8_t __b
, int16_t __c
, mve_pred16_t __p
)
10429 return __builtin_mve_vmlasq_m_n_sv8hi (__a
, __b
, __c
, __p
);
10432 __extension__
extern __inline uint8x16_t
10433 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10434 __arm_vmlasq_m_n_u8 (uint8x16_t __a
, uint8x16_t __b
, uint8_t __c
, mve_pred16_t __p
)
10436 return __builtin_mve_vmlasq_m_n_uv16qi (__a
, __b
, __c
, __p
);
10439 __extension__
extern __inline uint32x4_t
10440 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10441 __arm_vmlasq_m_n_u32 (uint32x4_t __a
, uint32x4_t __b
, uint32_t __c
, mve_pred16_t __p
)
10443 return __builtin_mve_vmlasq_m_n_uv4si (__a
, __b
, __c
, __p
);
10446 __extension__
extern __inline uint16x8_t
10447 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10448 __arm_vmlasq_m_n_u16 (uint16x8_t __a
, uint16x8_t __b
, uint16_t __c
, mve_pred16_t __p
)
10450 return __builtin_mve_vmlasq_m_n_uv8hi (__a
, __b
, __c
, __p
);
10453 __extension__
extern __inline
int32_t
10454 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10455 __arm_vmlsdavaq_p_s8 (int32_t __a
, int8x16_t __b
, int8x16_t __c
, mve_pred16_t __p
)
10457 return __builtin_mve_vmlsdavaq_p_sv16qi (__a
, __b
, __c
, __p
);
10460 __extension__
extern __inline
int32_t
10461 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10462 __arm_vmlsdavaq_p_s32 (int32_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
10464 return __builtin_mve_vmlsdavaq_p_sv4si (__a
, __b
, __c
, __p
);
10467 __extension__
extern __inline
int32_t
10468 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10469 __arm_vmlsdavaq_p_s16 (int32_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
10471 return __builtin_mve_vmlsdavaq_p_sv8hi (__a
, __b
, __c
, __p
);
10474 __extension__
extern __inline
int32_t
10475 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10476 __arm_vmlsdavaxq_p_s8 (int32_t __a
, int8x16_t __b
, int8x16_t __c
, mve_pred16_t __p
)
10478 return __builtin_mve_vmlsdavaxq_p_sv16qi (__a
, __b
, __c
, __p
);
10481 __extension__
extern __inline
int32_t
10482 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10483 __arm_vmlsdavaxq_p_s32 (int32_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
10485 return __builtin_mve_vmlsdavaxq_p_sv4si (__a
, __b
, __c
, __p
);
10488 __extension__
extern __inline
int32_t
10489 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10490 __arm_vmlsdavaxq_p_s16 (int32_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
10492 return __builtin_mve_vmlsdavaxq_p_sv8hi (__a
, __b
, __c
, __p
);
10495 __extension__
extern __inline int8x16_t
10496 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10497 __arm_vmulhq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10499 return __builtin_mve_vmulhq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10502 __extension__
extern __inline int32x4_t
10503 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10504 __arm_vmulhq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10506 return __builtin_mve_vmulhq_m_sv4si (__inactive
, __a
, __b
, __p
);
10509 __extension__
extern __inline int16x8_t
10510 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10511 __arm_vmulhq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10513 return __builtin_mve_vmulhq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10516 __extension__
extern __inline uint8x16_t
10517 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10518 __arm_vmulhq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
10520 return __builtin_mve_vmulhq_m_uv16qi (__inactive
, __a
, __b
, __p
);
10523 __extension__
extern __inline uint32x4_t
10524 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10525 __arm_vmulhq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
10527 return __builtin_mve_vmulhq_m_uv4si (__inactive
, __a
, __b
, __p
);
10530 __extension__
extern __inline uint16x8_t
10531 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10532 __arm_vmulhq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
10534 return __builtin_mve_vmulhq_m_uv8hi (__inactive
, __a
, __b
, __p
);
10537 __extension__
extern __inline int16x8_t
10538 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10539 __arm_vmullbq_int_m_s8 (int16x8_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10541 return __builtin_mve_vmullbq_int_m_sv16qi (__inactive
, __a
, __b
, __p
);
10544 __extension__
extern __inline int64x2_t
10545 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10546 __arm_vmullbq_int_m_s32 (int64x2_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10548 return __builtin_mve_vmullbq_int_m_sv4si (__inactive
, __a
, __b
, __p
);
10551 __extension__
extern __inline int32x4_t
10552 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10553 __arm_vmullbq_int_m_s16 (int32x4_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10555 return __builtin_mve_vmullbq_int_m_sv8hi (__inactive
, __a
, __b
, __p
);
10558 __extension__
extern __inline uint16x8_t
10559 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10560 __arm_vmullbq_int_m_u8 (uint16x8_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
10562 return __builtin_mve_vmullbq_int_m_uv16qi (__inactive
, __a
, __b
, __p
);
10565 __extension__
extern __inline uint64x2_t
10566 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10567 __arm_vmullbq_int_m_u32 (uint64x2_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
10569 return __builtin_mve_vmullbq_int_m_uv4si (__inactive
, __a
, __b
, __p
);
10572 __extension__
extern __inline uint32x4_t
10573 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10574 __arm_vmullbq_int_m_u16 (uint32x4_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
10576 return __builtin_mve_vmullbq_int_m_uv8hi (__inactive
, __a
, __b
, __p
);
10579 __extension__
extern __inline int16x8_t
10580 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10581 __arm_vmulltq_int_m_s8 (int16x8_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10583 return __builtin_mve_vmulltq_int_m_sv16qi (__inactive
, __a
, __b
, __p
);
10586 __extension__
extern __inline int64x2_t
10587 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10588 __arm_vmulltq_int_m_s32 (int64x2_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10590 return __builtin_mve_vmulltq_int_m_sv4si (__inactive
, __a
, __b
, __p
);
10593 __extension__
extern __inline int32x4_t
10594 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10595 __arm_vmulltq_int_m_s16 (int32x4_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10597 return __builtin_mve_vmulltq_int_m_sv8hi (__inactive
, __a
, __b
, __p
);
10600 __extension__
extern __inline uint16x8_t
10601 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10602 __arm_vmulltq_int_m_u8 (uint16x8_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
10604 return __builtin_mve_vmulltq_int_m_uv16qi (__inactive
, __a
, __b
, __p
);
10607 __extension__
extern __inline uint64x2_t
10608 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10609 __arm_vmulltq_int_m_u32 (uint64x2_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
10611 return __builtin_mve_vmulltq_int_m_uv4si (__inactive
, __a
, __b
, __p
);
10614 __extension__
extern __inline uint32x4_t
10615 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10616 __arm_vmulltq_int_m_u16 (uint32x4_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
10618 return __builtin_mve_vmulltq_int_m_uv8hi (__inactive
, __a
, __b
, __p
);
10621 __extension__
extern __inline int8x16_t
10622 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10623 __arm_vmulq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
10625 return __builtin_mve_vmulq_m_n_sv16qi (__inactive
, __a
, __b
, __p
);
10628 __extension__
extern __inline int32x4_t
10629 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10630 __arm_vmulq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
10632 return __builtin_mve_vmulq_m_n_sv4si (__inactive
, __a
, __b
, __p
);
10635 __extension__
extern __inline int16x8_t
10636 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10637 __arm_vmulq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
10639 return __builtin_mve_vmulq_m_n_sv8hi (__inactive
, __a
, __b
, __p
);
10642 __extension__
extern __inline uint8x16_t
10643 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10644 __arm_vmulq_m_n_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
10646 return __builtin_mve_vmulq_m_n_uv16qi (__inactive
, __a
, __b
, __p
);
10649 __extension__
extern __inline uint32x4_t
10650 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10651 __arm_vmulq_m_n_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
10653 return __builtin_mve_vmulq_m_n_uv4si (__inactive
, __a
, __b
, __p
);
10656 __extension__
extern __inline uint16x8_t
10657 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10658 __arm_vmulq_m_n_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
10660 return __builtin_mve_vmulq_m_n_uv8hi (__inactive
, __a
, __b
, __p
);
10663 __extension__
extern __inline int8x16_t
10664 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10665 __arm_vmulq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10667 return __builtin_mve_vmulq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10670 __extension__
extern __inline int32x4_t
10671 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10672 __arm_vmulq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10674 return __builtin_mve_vmulq_m_sv4si (__inactive
, __a
, __b
, __p
);
10677 __extension__
extern __inline int16x8_t
10678 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10679 __arm_vmulq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10681 return __builtin_mve_vmulq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10684 __extension__
extern __inline uint8x16_t
10685 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10686 __arm_vmulq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
10688 return __builtin_mve_vmulq_m_uv16qi (__inactive
, __a
, __b
, __p
);
10691 __extension__
extern __inline uint32x4_t
10692 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10693 __arm_vmulq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
10695 return __builtin_mve_vmulq_m_uv4si (__inactive
, __a
, __b
, __p
);
10698 __extension__
extern __inline uint16x8_t
10699 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10700 __arm_vmulq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
10702 return __builtin_mve_vmulq_m_uv8hi (__inactive
, __a
, __b
, __p
);
10705 __extension__
extern __inline int8x16_t
10706 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10707 __arm_vornq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10709 return __builtin_mve_vornq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10712 __extension__
extern __inline int32x4_t
10713 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10714 __arm_vornq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10716 return __builtin_mve_vornq_m_sv4si (__inactive
, __a
, __b
, __p
);
10719 __extension__
extern __inline int16x8_t
10720 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10721 __arm_vornq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10723 return __builtin_mve_vornq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10726 __extension__
extern __inline uint8x16_t
10727 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10728 __arm_vornq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
10730 return __builtin_mve_vornq_m_uv16qi (__inactive
, __a
, __b
, __p
);
10733 __extension__
extern __inline uint32x4_t
10734 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10735 __arm_vornq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
10737 return __builtin_mve_vornq_m_uv4si (__inactive
, __a
, __b
, __p
);
10740 __extension__
extern __inline uint16x8_t
10741 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10742 __arm_vornq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
10744 return __builtin_mve_vornq_m_uv8hi (__inactive
, __a
, __b
, __p
);
10747 __extension__
extern __inline int8x16_t
10748 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10749 __arm_vorrq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10751 return __builtin_mve_vorrq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10754 __extension__
extern __inline int32x4_t
10755 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10756 __arm_vorrq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10758 return __builtin_mve_vorrq_m_sv4si (__inactive
, __a
, __b
, __p
);
10761 __extension__
extern __inline int16x8_t
10762 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10763 __arm_vorrq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10765 return __builtin_mve_vorrq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10768 __extension__
extern __inline uint8x16_t
10769 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10770 __arm_vorrq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
10772 return __builtin_mve_vorrq_m_uv16qi (__inactive
, __a
, __b
, __p
);
10775 __extension__
extern __inline uint32x4_t
10776 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10777 __arm_vorrq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
10779 return __builtin_mve_vorrq_m_uv4si (__inactive
, __a
, __b
, __p
);
10782 __extension__
extern __inline uint16x8_t
10783 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10784 __arm_vorrq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
10786 return __builtin_mve_vorrq_m_uv8hi (__inactive
, __a
, __b
, __p
);
10789 __extension__
extern __inline int8x16_t
10790 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10791 __arm_vqaddq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
10793 return __builtin_mve_vqaddq_m_n_sv16qi (__inactive
, __a
, __b
, __p
);
10796 __extension__
extern __inline int32x4_t
10797 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10798 __arm_vqaddq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
10800 return __builtin_mve_vqaddq_m_n_sv4si (__inactive
, __a
, __b
, __p
);
10803 __extension__
extern __inline int16x8_t
10804 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10805 __arm_vqaddq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
10807 return __builtin_mve_vqaddq_m_n_sv8hi (__inactive
, __a
, __b
, __p
);
10810 __extension__
extern __inline uint8x16_t
10811 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10812 __arm_vqaddq_m_n_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
10814 return __builtin_mve_vqaddq_m_n_uv16qi (__inactive
, __a
, __b
, __p
);
10817 __extension__
extern __inline uint32x4_t
10818 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10819 __arm_vqaddq_m_n_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
10821 return __builtin_mve_vqaddq_m_n_uv4si (__inactive
, __a
, __b
, __p
);
10824 __extension__
extern __inline uint16x8_t
10825 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10826 __arm_vqaddq_m_n_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
10828 return __builtin_mve_vqaddq_m_n_uv8hi (__inactive
, __a
, __b
, __p
);
10831 __extension__
extern __inline int8x16_t
10832 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10833 __arm_vqaddq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10835 return __builtin_mve_vqaddq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10838 __extension__
extern __inline int32x4_t
10839 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10840 __arm_vqaddq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10842 return __builtin_mve_vqaddq_m_sv4si (__inactive
, __a
, __b
, __p
);
10845 __extension__
extern __inline int16x8_t
10846 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10847 __arm_vqaddq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10849 return __builtin_mve_vqaddq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10852 __extension__
extern __inline uint8x16_t
10853 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10854 __arm_vqaddq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
10856 return __builtin_mve_vqaddq_m_uv16qi (__inactive
, __a
, __b
, __p
);
10859 __extension__
extern __inline uint32x4_t
10860 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10861 __arm_vqaddq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
10863 return __builtin_mve_vqaddq_m_uv4si (__inactive
, __a
, __b
, __p
);
10866 __extension__
extern __inline uint16x8_t
10867 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10868 __arm_vqaddq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
10870 return __builtin_mve_vqaddq_m_uv8hi (__inactive
, __a
, __b
, __p
);
10873 __extension__
extern __inline int8x16_t
10874 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10875 __arm_vqdmladhq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10877 return __builtin_mve_vqdmladhq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10880 __extension__
extern __inline int32x4_t
10881 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10882 __arm_vqdmladhq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10884 return __builtin_mve_vqdmladhq_m_sv4si (__inactive
, __a
, __b
, __p
);
10887 __extension__
extern __inline int16x8_t
10888 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10889 __arm_vqdmladhq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10891 return __builtin_mve_vqdmladhq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10894 __extension__
extern __inline int8x16_t
10895 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10896 __arm_vqdmladhxq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10898 return __builtin_mve_vqdmladhxq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10901 __extension__
extern __inline int32x4_t
10902 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10903 __arm_vqdmladhxq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10905 return __builtin_mve_vqdmladhxq_m_sv4si (__inactive
, __a
, __b
, __p
);
10908 __extension__
extern __inline int16x8_t
10909 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10910 __arm_vqdmladhxq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10912 return __builtin_mve_vqdmladhxq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10915 __extension__
extern __inline int8x16_t
10916 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10917 __arm_vqdmlahq_m_n_s8 (int8x16_t __a
, int8x16_t __b
, int8_t __c
, mve_pred16_t __p
)
10919 return __builtin_mve_vqdmlahq_m_n_sv16qi (__a
, __b
, __c
, __p
);
10922 __extension__
extern __inline int32x4_t
10923 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10924 __arm_vqdmlahq_m_n_s32 (int32x4_t __a
, int32x4_t __b
, int32_t __c
, mve_pred16_t __p
)
10926 return __builtin_mve_vqdmlahq_m_n_sv4si (__a
, __b
, __c
, __p
);
10929 __extension__
extern __inline int16x8_t
10930 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10931 __arm_vqdmlahq_m_n_s16 (int16x8_t __a
, int16x8_t __b
, int16_t __c
, mve_pred16_t __p
)
10933 return __builtin_mve_vqdmlahq_m_n_sv8hi (__a
, __b
, __c
, __p
);
10936 __extension__
extern __inline int8x16_t
10937 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10938 __arm_vqdmlsdhq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10940 return __builtin_mve_vqdmlsdhq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10943 __extension__
extern __inline int32x4_t
10944 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10945 __arm_vqdmlsdhq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10947 return __builtin_mve_vqdmlsdhq_m_sv4si (__inactive
, __a
, __b
, __p
);
10950 __extension__
extern __inline int16x8_t
10951 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10952 __arm_vqdmlsdhq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10954 return __builtin_mve_vqdmlsdhq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10957 __extension__
extern __inline int8x16_t
10958 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10959 __arm_vqdmlsdhxq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
10961 return __builtin_mve_vqdmlsdhxq_m_sv16qi (__inactive
, __a
, __b
, __p
);
10964 __extension__
extern __inline int32x4_t
10965 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10966 __arm_vqdmlsdhxq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
10968 return __builtin_mve_vqdmlsdhxq_m_sv4si (__inactive
, __a
, __b
, __p
);
10971 __extension__
extern __inline int16x8_t
10972 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10973 __arm_vqdmlsdhxq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
10975 return __builtin_mve_vqdmlsdhxq_m_sv8hi (__inactive
, __a
, __b
, __p
);
10978 __extension__
extern __inline int8x16_t
10979 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10980 __arm_vqdmulhq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
10982 return __builtin_mve_vqdmulhq_m_n_sv16qi (__inactive
, __a
, __b
, __p
);
10985 __extension__
extern __inline int32x4_t
10986 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10987 __arm_vqdmulhq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
10989 return __builtin_mve_vqdmulhq_m_n_sv4si (__inactive
, __a
, __b
, __p
);
10992 __extension__
extern __inline int16x8_t
10993 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
10994 __arm_vqdmulhq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
10996 return __builtin_mve_vqdmulhq_m_n_sv8hi (__inactive
, __a
, __b
, __p
);
10999 __extension__
extern __inline int8x16_t
11000 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11001 __arm_vqdmulhq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11003 return __builtin_mve_vqdmulhq_m_sv16qi (__inactive
, __a
, __b
, __p
);
11006 __extension__
extern __inline int32x4_t
11007 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11008 __arm_vqdmulhq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11010 return __builtin_mve_vqdmulhq_m_sv4si (__inactive
, __a
, __b
, __p
);
11013 __extension__
extern __inline int16x8_t
11014 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11015 __arm_vqdmulhq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11017 return __builtin_mve_vqdmulhq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11020 __extension__
extern __inline int8x16_t
11021 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11022 __arm_vqrdmladhq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11024 return __builtin_mve_vqrdmladhq_m_sv16qi (__inactive
, __a
, __b
, __p
);
11027 __extension__
extern __inline int32x4_t
11028 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11029 __arm_vqrdmladhq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11031 return __builtin_mve_vqrdmladhq_m_sv4si (__inactive
, __a
, __b
, __p
);
11034 __extension__
extern __inline int16x8_t
11035 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11036 __arm_vqrdmladhq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11038 return __builtin_mve_vqrdmladhq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11041 __extension__
extern __inline int8x16_t
11042 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11043 __arm_vqrdmladhxq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11045 return __builtin_mve_vqrdmladhxq_m_sv16qi (__inactive
, __a
, __b
, __p
);
11048 __extension__
extern __inline int32x4_t
11049 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11050 __arm_vqrdmladhxq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11052 return __builtin_mve_vqrdmladhxq_m_sv4si (__inactive
, __a
, __b
, __p
);
11055 __extension__
extern __inline int16x8_t
11056 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11057 __arm_vqrdmladhxq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11059 return __builtin_mve_vqrdmladhxq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11062 __extension__
extern __inline int8x16_t
11063 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11064 __arm_vqrdmlahq_m_n_s8 (int8x16_t __a
, int8x16_t __b
, int8_t __c
, mve_pred16_t __p
)
11066 return __builtin_mve_vqrdmlahq_m_n_sv16qi (__a
, __b
, __c
, __p
);
11069 __extension__
extern __inline int32x4_t
11070 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11071 __arm_vqrdmlahq_m_n_s32 (int32x4_t __a
, int32x4_t __b
, int32_t __c
, mve_pred16_t __p
)
11073 return __builtin_mve_vqrdmlahq_m_n_sv4si (__a
, __b
, __c
, __p
);
11076 __extension__
extern __inline int16x8_t
11077 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11078 __arm_vqrdmlahq_m_n_s16 (int16x8_t __a
, int16x8_t __b
, int16_t __c
, mve_pred16_t __p
)
11080 return __builtin_mve_vqrdmlahq_m_n_sv8hi (__a
, __b
, __c
, __p
);
11083 __extension__
extern __inline int8x16_t
11084 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11085 __arm_vqrdmlashq_m_n_s8 (int8x16_t __a
, int8x16_t __b
, int8_t __c
, mve_pred16_t __p
)
11087 return __builtin_mve_vqrdmlashq_m_n_sv16qi (__a
, __b
, __c
, __p
);
11090 __extension__
extern __inline int32x4_t
11091 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11092 __arm_vqrdmlashq_m_n_s32 (int32x4_t __a
, int32x4_t __b
, int32_t __c
, mve_pred16_t __p
)
11094 return __builtin_mve_vqrdmlashq_m_n_sv4si (__a
, __b
, __c
, __p
);
11097 __extension__
extern __inline int16x8_t
11098 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11099 __arm_vqrdmlashq_m_n_s16 (int16x8_t __a
, int16x8_t __b
, int16_t __c
, mve_pred16_t __p
)
11101 return __builtin_mve_vqrdmlashq_m_n_sv8hi (__a
, __b
, __c
, __p
);
11104 __extension__
extern __inline int8x16_t
11105 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11106 __arm_vqdmlashq_m_n_s8 (int8x16_t __a
, int8x16_t __b
, int8_t __c
, mve_pred16_t __p
)
11108 return __builtin_mve_vqdmlashq_m_n_sv16qi (__a
, __b
, __c
, __p
);
11111 __extension__
extern __inline int16x8_t
11112 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11113 __arm_vqdmlashq_m_n_s16 (int16x8_t __a
, int16x8_t __b
, int16_t __c
, mve_pred16_t __p
)
11115 return __builtin_mve_vqdmlashq_m_n_sv8hi (__a
, __b
, __c
, __p
);
11118 __extension__
extern __inline int32x4_t
11119 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11120 __arm_vqdmlashq_m_n_s32 (int32x4_t __a
, int32x4_t __b
, int32_t __c
, mve_pred16_t __p
)
11122 return __builtin_mve_vqdmlashq_m_n_sv4si (__a
, __b
, __c
, __p
);
11125 __extension__
extern __inline int8x16_t
11126 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11127 __arm_vqrdmlsdhq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11129 return __builtin_mve_vqrdmlsdhq_m_sv16qi (__inactive
, __a
, __b
, __p
);
11132 __extension__
extern __inline int32x4_t
11133 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11134 __arm_vqrdmlsdhq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11136 return __builtin_mve_vqrdmlsdhq_m_sv4si (__inactive
, __a
, __b
, __p
);
11139 __extension__
extern __inline int16x8_t
11140 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11141 __arm_vqrdmlsdhq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11143 return __builtin_mve_vqrdmlsdhq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11146 __extension__
extern __inline int8x16_t
11147 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11148 __arm_vqrdmlsdhxq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11150 return __builtin_mve_vqrdmlsdhxq_m_sv16qi (__inactive
, __a
, __b
, __p
);
11153 __extension__
extern __inline int32x4_t
11154 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11155 __arm_vqrdmlsdhxq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11157 return __builtin_mve_vqrdmlsdhxq_m_sv4si (__inactive
, __a
, __b
, __p
);
11160 __extension__
extern __inline int16x8_t
11161 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11162 __arm_vqrdmlsdhxq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11164 return __builtin_mve_vqrdmlsdhxq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11167 __extension__
extern __inline int8x16_t
11168 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11169 __arm_vqrdmulhq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
11171 return __builtin_mve_vqrdmulhq_m_n_sv16qi (__inactive
, __a
, __b
, __p
);
11174 __extension__
extern __inline int32x4_t
11175 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11176 __arm_vqrdmulhq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
11178 return __builtin_mve_vqrdmulhq_m_n_sv4si (__inactive
, __a
, __b
, __p
);
11181 __extension__
extern __inline int16x8_t
11182 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11183 __arm_vqrdmulhq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
11185 return __builtin_mve_vqrdmulhq_m_n_sv8hi (__inactive
, __a
, __b
, __p
);
11188 __extension__
extern __inline int8x16_t
11189 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11190 __arm_vqrdmulhq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11192 return __builtin_mve_vqrdmulhq_m_sv16qi (__inactive
, __a
, __b
, __p
);
11195 __extension__
extern __inline int32x4_t
11196 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11197 __arm_vqrdmulhq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11199 return __builtin_mve_vqrdmulhq_m_sv4si (__inactive
, __a
, __b
, __p
);
11202 __extension__
extern __inline int16x8_t
11203 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11204 __arm_vqrdmulhq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11206 return __builtin_mve_vqrdmulhq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11209 __extension__
extern __inline int8x16_t
11210 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11211 __arm_vqrshlq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11213 return __builtin_mve_vqrshlq_m_sv16qi (__inactive
, __a
, __b
, __p
);
11216 __extension__
extern __inline int32x4_t
11217 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11218 __arm_vqrshlq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11220 return __builtin_mve_vqrshlq_m_sv4si (__inactive
, __a
, __b
, __p
);
11223 __extension__
extern __inline int16x8_t
11224 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11225 __arm_vqrshlq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11227 return __builtin_mve_vqrshlq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11230 __extension__
extern __inline uint8x16_t
11231 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11232 __arm_vqrshlq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11234 return __builtin_mve_vqrshlq_m_uv16qi (__inactive
, __a
, __b
, __p
);
11237 __extension__
extern __inline uint32x4_t
11238 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11239 __arm_vqrshlq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11241 return __builtin_mve_vqrshlq_m_uv4si (__inactive
, __a
, __b
, __p
);
11244 __extension__
extern __inline uint16x8_t
11245 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11246 __arm_vqrshlq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11248 return __builtin_mve_vqrshlq_m_uv8hi (__inactive
, __a
, __b
, __p
);
11251 __extension__
extern __inline int8x16_t
11252 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11253 __arm_vqshlq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
11255 return __builtin_mve_vqshlq_m_n_sv16qi (__inactive
, __a
, __imm
, __p
);
11258 __extension__
extern __inline int32x4_t
11259 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11260 __arm_vqshlq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
11262 return __builtin_mve_vqshlq_m_n_sv4si (__inactive
, __a
, __imm
, __p
);
11265 __extension__
extern __inline int16x8_t
11266 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11267 __arm_vqshlq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
11269 return __builtin_mve_vqshlq_m_n_sv8hi (__inactive
, __a
, __imm
, __p
);
11272 __extension__
extern __inline uint8x16_t
11273 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11274 __arm_vqshlq_m_n_u8 (uint8x16_t __inactive
, uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
11276 return __builtin_mve_vqshlq_m_n_uv16qi (__inactive
, __a
, __imm
, __p
);
11279 __extension__
extern __inline uint32x4_t
11280 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11281 __arm_vqshlq_m_n_u32 (uint32x4_t __inactive
, uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
11283 return __builtin_mve_vqshlq_m_n_uv4si (__inactive
, __a
, __imm
, __p
);
11286 __extension__
extern __inline uint16x8_t
11287 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11288 __arm_vqshlq_m_n_u16 (uint16x8_t __inactive
, uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
11290 return __builtin_mve_vqshlq_m_n_uv8hi (__inactive
, __a
, __imm
, __p
);
11293 __extension__
extern __inline int8x16_t
11294 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11295 __arm_vqshlq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11297 return __builtin_mve_vqshlq_m_sv16qi (__inactive
, __a
, __b
, __p
);
11300 __extension__
extern __inline int32x4_t
11301 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11302 __arm_vqshlq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11304 return __builtin_mve_vqshlq_m_sv4si (__inactive
, __a
, __b
, __p
);
11307 __extension__
extern __inline int16x8_t
11308 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11309 __arm_vqshlq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11311 return __builtin_mve_vqshlq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11314 __extension__
extern __inline uint8x16_t
11315 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11316 __arm_vqshlq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11318 return __builtin_mve_vqshlq_m_uv16qi (__inactive
, __a
, __b
, __p
);
11321 __extension__
extern __inline uint32x4_t
11322 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11323 __arm_vqshlq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11325 return __builtin_mve_vqshlq_m_uv4si (__inactive
, __a
, __b
, __p
);
11328 __extension__
extern __inline uint16x8_t
11329 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11330 __arm_vqshlq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11332 return __builtin_mve_vqshlq_m_uv8hi (__inactive
, __a
, __b
, __p
);
11335 __extension__
extern __inline int8x16_t
11336 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11337 __arm_vqsubq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
11339 return __builtin_mve_vqsubq_m_n_sv16qi (__inactive
, __a
, __b
, __p
);
11342 __extension__
extern __inline int32x4_t
11343 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11344 __arm_vqsubq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
11346 return __builtin_mve_vqsubq_m_n_sv4si (__inactive
, __a
, __b
, __p
);
11349 __extension__
extern __inline int16x8_t
11350 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11351 __arm_vqsubq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
11353 return __builtin_mve_vqsubq_m_n_sv8hi (__inactive
, __a
, __b
, __p
);
11356 __extension__
extern __inline uint8x16_t
11357 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11358 __arm_vqsubq_m_n_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
11360 return __builtin_mve_vqsubq_m_n_uv16qi (__inactive
, __a
, __b
, __p
);
11363 __extension__
extern __inline uint32x4_t
11364 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11365 __arm_vqsubq_m_n_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
11367 return __builtin_mve_vqsubq_m_n_uv4si (__inactive
, __a
, __b
, __p
);
11370 __extension__
extern __inline uint16x8_t
11371 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11372 __arm_vqsubq_m_n_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
11374 return __builtin_mve_vqsubq_m_n_uv8hi (__inactive
, __a
, __b
, __p
);
11377 __extension__
extern __inline int8x16_t
11378 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11379 __arm_vqsubq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11381 return __builtin_mve_vqsubq_m_sv16qi (__inactive
, __a
, __b
, __p
);
11384 __extension__
extern __inline int32x4_t
11385 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11386 __arm_vqsubq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11388 return __builtin_mve_vqsubq_m_sv4si (__inactive
, __a
, __b
, __p
);
11391 __extension__
extern __inline int16x8_t
11392 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11393 __arm_vqsubq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11395 return __builtin_mve_vqsubq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11398 __extension__
extern __inline uint8x16_t
11399 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11400 __arm_vqsubq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
11402 return __builtin_mve_vqsubq_m_uv16qi (__inactive
, __a
, __b
, __p
);
11405 __extension__
extern __inline uint32x4_t
11406 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11407 __arm_vqsubq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
11409 return __builtin_mve_vqsubq_m_uv4si (__inactive
, __a
, __b
, __p
);
11412 __extension__
extern __inline uint16x8_t
11413 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11414 __arm_vqsubq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
11416 return __builtin_mve_vqsubq_m_uv8hi (__inactive
, __a
, __b
, __p
);
11419 __extension__
extern __inline int8x16_t
11420 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11421 __arm_vrhaddq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11423 return __builtin_mve_vrhaddq_m_sv16qi (__inactive
, __a
, __b
, __p
);
11426 __extension__
extern __inline int32x4_t
11427 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11428 __arm_vrhaddq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11430 return __builtin_mve_vrhaddq_m_sv4si (__inactive
, __a
, __b
, __p
);
11433 __extension__
extern __inline int16x8_t
11434 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11435 __arm_vrhaddq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11437 return __builtin_mve_vrhaddq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11440 __extension__
extern __inline uint8x16_t
11441 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11442 __arm_vrhaddq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
11444 return __builtin_mve_vrhaddq_m_uv16qi (__inactive
, __a
, __b
, __p
);
11447 __extension__
extern __inline uint32x4_t
11448 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11449 __arm_vrhaddq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
11451 return __builtin_mve_vrhaddq_m_uv4si (__inactive
, __a
, __b
, __p
);
11454 __extension__
extern __inline uint16x8_t
11455 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11456 __arm_vrhaddq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
11458 return __builtin_mve_vrhaddq_m_uv8hi (__inactive
, __a
, __b
, __p
);
11461 __extension__
extern __inline int8x16_t
11462 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11463 __arm_vrmulhq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11465 return __builtin_mve_vrmulhq_m_sv16qi (__inactive
, __a
, __b
, __p
);
11468 __extension__
extern __inline int32x4_t
11469 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11470 __arm_vrmulhq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11472 return __builtin_mve_vrmulhq_m_sv4si (__inactive
, __a
, __b
, __p
);
11475 __extension__
extern __inline int16x8_t
11476 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11477 __arm_vrmulhq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11479 return __builtin_mve_vrmulhq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11482 __extension__
extern __inline uint8x16_t
11483 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11484 __arm_vrmulhq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
11486 return __builtin_mve_vrmulhq_m_uv16qi (__inactive
, __a
, __b
, __p
);
11489 __extension__
extern __inline uint32x4_t
11490 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11491 __arm_vrmulhq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
11493 return __builtin_mve_vrmulhq_m_uv4si (__inactive
, __a
, __b
, __p
);
11496 __extension__
extern __inline uint16x8_t
11497 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11498 __arm_vrmulhq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
11500 return __builtin_mve_vrmulhq_m_uv8hi (__inactive
, __a
, __b
, __p
);
11503 __extension__
extern __inline int8x16_t
11504 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11505 __arm_vrshlq_m_s8 (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11507 return __builtin_mve_vrshlq_m_sv16qi (__inactive
, __a
, __b
, __p
);
11510 __extension__
extern __inline int32x4_t
11511 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11512 __arm_vrshlq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11514 return __builtin_mve_vrshlq_m_sv4si (__inactive
, __a
, __b
, __p
);
11517 __extension__
extern __inline int16x8_t
11518 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11519 __arm_vrshlq_m_s16 (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11521 return __builtin_mve_vrshlq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11524 __extension__
extern __inline uint8x16_t
11525 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11526 __arm_vrshlq_m_u8 (uint8x16_t __inactive
, uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
11528 return __builtin_mve_vrshlq_m_uv16qi (__inactive
, __a
, __b
, __p
);
11531 __extension__
extern __inline uint32x4_t
11532 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11533 __arm_vrshlq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11535 return __builtin_mve_vrshlq_m_uv4si (__inactive
, __a
, __b
, __p
);
11538 __extension__
extern __inline uint16x8_t
11539 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11540 __arm_vrshlq_m_u16 (uint16x8_t __inactive
, uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11542 return __builtin_mve_vrshlq_m_uv8hi (__inactive
, __a
, __b
, __p
);
11545 __extension__
extern __inline int8x16_t
11546 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11547 __arm_vrshrq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
11549 return __builtin_mve_vrshrq_m_n_sv16qi (__inactive
, __a
, __imm
, __p
);
11552 __extension__
extern __inline int32x4_t
11553 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11554 __arm_vrshrq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
11556 return __builtin_mve_vrshrq_m_n_sv4si (__inactive
, __a
, __imm
, __p
);
11559 __extension__
extern __inline int16x8_t
11560 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11561 __arm_vrshrq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
11563 return __builtin_mve_vrshrq_m_n_sv8hi (__inactive
, __a
, __imm
, __p
);
11566 __extension__
extern __inline uint8x16_t
11567 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11568 __arm_vrshrq_m_n_u8 (uint8x16_t __inactive
, uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
11570 return __builtin_mve_vrshrq_m_n_uv16qi (__inactive
, __a
, __imm
, __p
);
11573 __extension__
extern __inline uint32x4_t
11574 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11575 __arm_vrshrq_m_n_u32 (uint32x4_t __inactive
, uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
11577 return __builtin_mve_vrshrq_m_n_uv4si (__inactive
, __a
, __imm
, __p
);
11580 __extension__
extern __inline uint16x8_t
11581 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11582 __arm_vrshrq_m_n_u16 (uint16x8_t __inactive
, uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
11584 return __builtin_mve_vrshrq_m_n_uv8hi (__inactive
, __a
, __imm
, __p
);
11587 __extension__
extern __inline int8x16_t
11588 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11589 __arm_vshlq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
11591 return __builtin_mve_vshlq_m_n_sv16qi (__inactive
, __a
, __imm
, __p
);
11594 __extension__
extern __inline int32x4_t
11595 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11596 __arm_vshlq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
11598 return __builtin_mve_vshlq_m_n_sv4si (__inactive
, __a
, __imm
, __p
);
11601 __extension__
extern __inline int16x8_t
11602 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11603 __arm_vshlq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
11605 return __builtin_mve_vshlq_m_n_sv8hi (__inactive
, __a
, __imm
, __p
);
11608 __extension__
extern __inline uint8x16_t
11609 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11610 __arm_vshlq_m_n_u8 (uint8x16_t __inactive
, uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
11612 return __builtin_mve_vshlq_m_n_uv16qi (__inactive
, __a
, __imm
, __p
);
11615 __extension__
extern __inline uint32x4_t
11616 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11617 __arm_vshlq_m_n_u32 (uint32x4_t __inactive
, uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
11619 return __builtin_mve_vshlq_m_n_uv4si (__inactive
, __a
, __imm
, __p
);
11622 __extension__
extern __inline uint16x8_t
11623 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11624 __arm_vshlq_m_n_u16 (uint16x8_t __inactive
, uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
11626 return __builtin_mve_vshlq_m_n_uv8hi (__inactive
, __a
, __imm
, __p
);
11629 __extension__
extern __inline int8x16_t
11630 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11631 __arm_vshrq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
11633 return __builtin_mve_vshrq_m_n_sv16qi (__inactive
, __a
, __imm
, __p
);
11636 __extension__
extern __inline int32x4_t
11637 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11638 __arm_vshrq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
11640 return __builtin_mve_vshrq_m_n_sv4si (__inactive
, __a
, __imm
, __p
);
11643 __extension__
extern __inline int16x8_t
11644 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11645 __arm_vshrq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
11647 return __builtin_mve_vshrq_m_n_sv8hi (__inactive
, __a
, __imm
, __p
);
11650 __extension__
extern __inline uint8x16_t
11651 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11652 __arm_vshrq_m_n_u8 (uint8x16_t __inactive
, uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
11654 return __builtin_mve_vshrq_m_n_uv16qi (__inactive
, __a
, __imm
, __p
);
11657 __extension__
extern __inline uint32x4_t
11658 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11659 __arm_vshrq_m_n_u32 (uint32x4_t __inactive
, uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
11661 return __builtin_mve_vshrq_m_n_uv4si (__inactive
, __a
, __imm
, __p
);
11664 __extension__
extern __inline uint16x8_t
11665 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11666 __arm_vshrq_m_n_u16 (uint16x8_t __inactive
, uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
11668 return __builtin_mve_vshrq_m_n_uv8hi (__inactive
, __a
, __imm
, __p
);
11671 __extension__
extern __inline int8x16_t
11672 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11673 __arm_vsliq_m_n_s8 (int8x16_t __a
, int8x16_t __b
, const int __imm
, mve_pred16_t __p
)
11675 return __builtin_mve_vsliq_m_n_sv16qi (__a
, __b
, __imm
, __p
);
11678 __extension__
extern __inline int32x4_t
11679 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11680 __arm_vsliq_m_n_s32 (int32x4_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
11682 return __builtin_mve_vsliq_m_n_sv4si (__a
, __b
, __imm
, __p
);
11685 __extension__
extern __inline int16x8_t
11686 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11687 __arm_vsliq_m_n_s16 (int16x8_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
11689 return __builtin_mve_vsliq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
11692 __extension__
extern __inline uint8x16_t
11693 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11694 __arm_vsliq_m_n_u8 (uint8x16_t __a
, uint8x16_t __b
, const int __imm
, mve_pred16_t __p
)
11696 return __builtin_mve_vsliq_m_n_uv16qi (__a
, __b
, __imm
, __p
);
11699 __extension__
extern __inline uint32x4_t
11700 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11701 __arm_vsliq_m_n_u32 (uint32x4_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
11703 return __builtin_mve_vsliq_m_n_uv4si (__a
, __b
, __imm
, __p
);
11706 __extension__
extern __inline uint16x8_t
11707 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11708 __arm_vsliq_m_n_u16 (uint16x8_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
11710 return __builtin_mve_vsliq_m_n_uv8hi (__a
, __b
, __imm
, __p
);
11713 __extension__
extern __inline int8x16_t
11714 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11715 __arm_vsubq_m_n_s8 (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
11717 return __builtin_mve_vsubq_m_n_sv16qi (__inactive
, __a
, __b
, __p
);
11720 __extension__
extern __inline int32x4_t
11721 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11722 __arm_vsubq_m_n_s32 (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
11724 return __builtin_mve_vsubq_m_n_sv4si (__inactive
, __a
, __b
, __p
);
11727 __extension__
extern __inline int16x8_t
11728 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11729 __arm_vsubq_m_n_s16 (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
11731 return __builtin_mve_vsubq_m_n_sv8hi (__inactive
, __a
, __b
, __p
);
11734 __extension__
extern __inline uint8x16_t
11735 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11736 __arm_vsubq_m_n_u8 (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
11738 return __builtin_mve_vsubq_m_n_uv16qi (__inactive
, __a
, __b
, __p
);
11741 __extension__
extern __inline uint32x4_t
11742 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11743 __arm_vsubq_m_n_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
11745 return __builtin_mve_vsubq_m_n_uv4si (__inactive
, __a
, __b
, __p
);
11748 __extension__
extern __inline uint16x8_t
11749 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11750 __arm_vsubq_m_n_u16 (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
11752 return __builtin_mve_vsubq_m_n_uv8hi (__inactive
, __a
, __b
, __p
);
11755 __extension__
extern __inline
int64_t
11756 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11757 __arm_vmlaldavaq_p_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
11759 return __builtin_mve_vmlaldavaq_p_sv4si (__a
, __b
, __c
, __p
);
11762 __extension__
extern __inline
int64_t
11763 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11764 __arm_vmlaldavaq_p_s16 (int64_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
11766 return __builtin_mve_vmlaldavaq_p_sv8hi (__a
, __b
, __c
, __p
);
11769 __extension__
extern __inline
uint64_t
11770 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11771 __arm_vmlaldavaq_p_u32 (uint64_t __a
, uint32x4_t __b
, uint32x4_t __c
, mve_pred16_t __p
)
11773 return __builtin_mve_vmlaldavaq_p_uv4si (__a
, __b
, __c
, __p
);
11776 __extension__
extern __inline
uint64_t
11777 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11778 __arm_vmlaldavaq_p_u16 (uint64_t __a
, uint16x8_t __b
, uint16x8_t __c
, mve_pred16_t __p
)
11780 return __builtin_mve_vmlaldavaq_p_uv8hi (__a
, __b
, __c
, __p
);
11783 __extension__
extern __inline
int64_t
11784 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11785 __arm_vmlaldavaxq_p_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
11787 return __builtin_mve_vmlaldavaxq_p_sv4si (__a
, __b
, __c
, __p
);
11790 __extension__
extern __inline
int64_t
11791 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11792 __arm_vmlaldavaxq_p_s16 (int64_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
11794 return __builtin_mve_vmlaldavaxq_p_sv8hi (__a
, __b
, __c
, __p
);
11797 __extension__
extern __inline
int64_t
11798 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11799 __arm_vmlsldavaq_p_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
11801 return __builtin_mve_vmlsldavaq_p_sv4si (__a
, __b
, __c
, __p
);
11804 __extension__
extern __inline
int64_t
11805 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11806 __arm_vmlsldavaq_p_s16 (int64_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
11808 return __builtin_mve_vmlsldavaq_p_sv8hi (__a
, __b
, __c
, __p
);
11811 __extension__
extern __inline
int64_t
11812 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11813 __arm_vmlsldavaxq_p_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
11815 return __builtin_mve_vmlsldavaxq_p_sv4si (__a
, __b
, __c
, __p
);
11818 __extension__
extern __inline
int64_t
11819 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11820 __arm_vmlsldavaxq_p_s16 (int64_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
11822 return __builtin_mve_vmlsldavaxq_p_sv8hi (__a
, __b
, __c
, __p
);
11825 __extension__
extern __inline uint16x8_t
11826 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11827 __arm_vmullbq_poly_m_p8 (uint16x8_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
11829 return __builtin_mve_vmullbq_poly_m_pv16qi (__inactive
, __a
, __b
, __p
);
11832 __extension__
extern __inline uint32x4_t
11833 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11834 __arm_vmullbq_poly_m_p16 (uint32x4_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
11836 return __builtin_mve_vmullbq_poly_m_pv8hi (__inactive
, __a
, __b
, __p
);
11839 __extension__
extern __inline uint16x8_t
11840 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11841 __arm_vmulltq_poly_m_p8 (uint16x8_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
11843 return __builtin_mve_vmulltq_poly_m_pv16qi (__inactive
, __a
, __b
, __p
);
11846 __extension__
extern __inline uint32x4_t
11847 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11848 __arm_vmulltq_poly_m_p16 (uint32x4_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
11850 return __builtin_mve_vmulltq_poly_m_pv8hi (__inactive
, __a
, __b
, __p
);
11853 __extension__
extern __inline int64x2_t
11854 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11855 __arm_vqdmullbq_m_n_s32 (int64x2_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
11857 return __builtin_mve_vqdmullbq_m_n_sv4si (__inactive
, __a
, __b
, __p
);
11860 __extension__
extern __inline int32x4_t
11861 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11862 __arm_vqdmullbq_m_n_s16 (int32x4_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
11864 return __builtin_mve_vqdmullbq_m_n_sv8hi (__inactive
, __a
, __b
, __p
);
11867 __extension__
extern __inline int64x2_t
11868 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11869 __arm_vqdmullbq_m_s32 (int64x2_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11871 return __builtin_mve_vqdmullbq_m_sv4si (__inactive
, __a
, __b
, __p
);
11874 __extension__
extern __inline int32x4_t
11875 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11876 __arm_vqdmullbq_m_s16 (int32x4_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11878 return __builtin_mve_vqdmullbq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11881 __extension__
extern __inline int64x2_t
11882 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11883 __arm_vqdmulltq_m_n_s32 (int64x2_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
11885 return __builtin_mve_vqdmulltq_m_n_sv4si (__inactive
, __a
, __b
, __p
);
11888 __extension__
extern __inline int32x4_t
11889 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11890 __arm_vqdmulltq_m_n_s16 (int32x4_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
11892 return __builtin_mve_vqdmulltq_m_n_sv8hi (__inactive
, __a
, __b
, __p
);
11895 __extension__
extern __inline int64x2_t
11896 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11897 __arm_vqdmulltq_m_s32 (int64x2_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
11899 return __builtin_mve_vqdmulltq_m_sv4si (__inactive
, __a
, __b
, __p
);
11902 __extension__
extern __inline int32x4_t
11903 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11904 __arm_vqdmulltq_m_s16 (int32x4_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
11906 return __builtin_mve_vqdmulltq_m_sv8hi (__inactive
, __a
, __b
, __p
);
11909 __extension__
extern __inline int16x8_t
11910 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11911 __arm_vqrshrnbq_m_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
11913 return __builtin_mve_vqrshrnbq_m_n_sv4si (__a
, __b
, __imm
, __p
);
11916 __extension__
extern __inline int8x16_t
11917 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11918 __arm_vqrshrnbq_m_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
11920 return __builtin_mve_vqrshrnbq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
11923 __extension__
extern __inline uint16x8_t
11924 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11925 __arm_vqrshrnbq_m_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
11927 return __builtin_mve_vqrshrnbq_m_n_uv4si (__a
, __b
, __imm
, __p
);
11930 __extension__
extern __inline uint8x16_t
11931 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11932 __arm_vqrshrnbq_m_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
11934 return __builtin_mve_vqrshrnbq_m_n_uv8hi (__a
, __b
, __imm
, __p
);
11937 __extension__
extern __inline int16x8_t
11938 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11939 __arm_vqrshrntq_m_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
11941 return __builtin_mve_vqrshrntq_m_n_sv4si (__a
, __b
, __imm
, __p
);
11944 __extension__
extern __inline int8x16_t
11945 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11946 __arm_vqrshrntq_m_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
11948 return __builtin_mve_vqrshrntq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
11951 __extension__
extern __inline uint16x8_t
11952 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11953 __arm_vqrshrntq_m_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
11955 return __builtin_mve_vqrshrntq_m_n_uv4si (__a
, __b
, __imm
, __p
);
11958 __extension__
extern __inline uint8x16_t
11959 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11960 __arm_vqrshrntq_m_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
11962 return __builtin_mve_vqrshrntq_m_n_uv8hi (__a
, __b
, __imm
, __p
);
11965 __extension__
extern __inline uint16x8_t
11966 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11967 __arm_vqrshrunbq_m_n_s32 (uint16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
11969 return __builtin_mve_vqrshrunbq_m_n_sv4si (__a
, __b
, __imm
, __p
);
11972 __extension__
extern __inline uint8x16_t
11973 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11974 __arm_vqrshrunbq_m_n_s16 (uint8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
11976 return __builtin_mve_vqrshrunbq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
11979 __extension__
extern __inline uint16x8_t
11980 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11981 __arm_vqrshruntq_m_n_s32 (uint16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
11983 return __builtin_mve_vqrshruntq_m_n_sv4si (__a
, __b
, __imm
, __p
);
11986 __extension__
extern __inline uint8x16_t
11987 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11988 __arm_vqrshruntq_m_n_s16 (uint8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
11990 return __builtin_mve_vqrshruntq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
11993 __extension__
extern __inline int16x8_t
11994 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
11995 __arm_vqshrnbq_m_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
11997 return __builtin_mve_vqshrnbq_m_n_sv4si (__a
, __b
, __imm
, __p
);
12000 __extension__
extern __inline int8x16_t
12001 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12002 __arm_vqshrnbq_m_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12004 return __builtin_mve_vqshrnbq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
12007 __extension__
extern __inline uint16x8_t
12008 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12009 __arm_vqshrnbq_m_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12011 return __builtin_mve_vqshrnbq_m_n_uv4si (__a
, __b
, __imm
, __p
);
12014 __extension__
extern __inline uint8x16_t
12015 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12016 __arm_vqshrnbq_m_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12018 return __builtin_mve_vqshrnbq_m_n_uv8hi (__a
, __b
, __imm
, __p
);
12021 __extension__
extern __inline int16x8_t
12022 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12023 __arm_vqshrntq_m_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12025 return __builtin_mve_vqshrntq_m_n_sv4si (__a
, __b
, __imm
, __p
);
12028 __extension__
extern __inline int8x16_t
12029 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12030 __arm_vqshrntq_m_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12032 return __builtin_mve_vqshrntq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
12035 __extension__
extern __inline uint16x8_t
12036 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12037 __arm_vqshrntq_m_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12039 return __builtin_mve_vqshrntq_m_n_uv4si (__a
, __b
, __imm
, __p
);
12042 __extension__
extern __inline uint8x16_t
12043 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12044 __arm_vqshrntq_m_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12046 return __builtin_mve_vqshrntq_m_n_uv8hi (__a
, __b
, __imm
, __p
);
12049 __extension__
extern __inline uint16x8_t
12050 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12051 __arm_vqshrunbq_m_n_s32 (uint16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12053 return __builtin_mve_vqshrunbq_m_n_sv4si (__a
, __b
, __imm
, __p
);
12056 __extension__
extern __inline uint8x16_t
12057 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12058 __arm_vqshrunbq_m_n_s16 (uint8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12060 return __builtin_mve_vqshrunbq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
12063 __extension__
extern __inline uint16x8_t
12064 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12065 __arm_vqshruntq_m_n_s32 (uint16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12067 return __builtin_mve_vqshruntq_m_n_sv4si (__a
, __b
, __imm
, __p
);
12070 __extension__
extern __inline uint8x16_t
12071 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12072 __arm_vqshruntq_m_n_s16 (uint8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12074 return __builtin_mve_vqshruntq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
12077 __extension__
extern __inline
int64_t
12078 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12079 __arm_vrmlaldavhaq_p_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
12081 return __builtin_mve_vrmlaldavhaq_p_sv4si (__a
, __b
, __c
, __p
);
12084 __extension__
extern __inline
uint64_t
12085 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12086 __arm_vrmlaldavhaq_p_u32 (uint64_t __a
, uint32x4_t __b
, uint32x4_t __c
, mve_pred16_t __p
)
12088 return __builtin_mve_vrmlaldavhaq_p_uv4si (__a
, __b
, __c
, __p
);
12091 __extension__
extern __inline
int64_t
12092 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12093 __arm_vrmlaldavhaxq_p_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
12095 return __builtin_mve_vrmlaldavhaxq_p_sv4si (__a
, __b
, __c
, __p
);
12098 __extension__
extern __inline
int64_t
12099 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12100 __arm_vrmlsldavhaq_p_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
12102 return __builtin_mve_vrmlsldavhaq_p_sv4si (__a
, __b
, __c
, __p
);
12105 __extension__
extern __inline
int64_t
12106 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12107 __arm_vrmlsldavhaxq_p_s32 (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
12109 return __builtin_mve_vrmlsldavhaxq_p_sv4si (__a
, __b
, __c
, __p
);
12112 __extension__
extern __inline int16x8_t
12113 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12114 __arm_vrshrnbq_m_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12116 return __builtin_mve_vrshrnbq_m_n_sv4si (__a
, __b
, __imm
, __p
);
12119 __extension__
extern __inline int8x16_t
12120 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12121 __arm_vrshrnbq_m_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12123 return __builtin_mve_vrshrnbq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
12126 __extension__
extern __inline uint16x8_t
12127 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12128 __arm_vrshrnbq_m_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12130 return __builtin_mve_vrshrnbq_m_n_uv4si (__a
, __b
, __imm
, __p
);
12133 __extension__
extern __inline uint8x16_t
12134 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12135 __arm_vrshrnbq_m_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12137 return __builtin_mve_vrshrnbq_m_n_uv8hi (__a
, __b
, __imm
, __p
);
12140 __extension__
extern __inline int16x8_t
12141 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12142 __arm_vrshrntq_m_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12144 return __builtin_mve_vrshrntq_m_n_sv4si (__a
, __b
, __imm
, __p
);
12147 __extension__
extern __inline int8x16_t
12148 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12149 __arm_vrshrntq_m_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12151 return __builtin_mve_vrshrntq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
12154 __extension__
extern __inline uint16x8_t
12155 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12156 __arm_vrshrntq_m_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12158 return __builtin_mve_vrshrntq_m_n_uv4si (__a
, __b
, __imm
, __p
);
12161 __extension__
extern __inline uint8x16_t
12162 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12163 __arm_vrshrntq_m_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12165 return __builtin_mve_vrshrntq_m_n_uv8hi (__a
, __b
, __imm
, __p
);
12168 __extension__
extern __inline int16x8_t
12169 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12170 __arm_vshllbq_m_n_s8 (int16x8_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
12172 return __builtin_mve_vshllbq_m_n_sv16qi (__inactive
, __a
, __imm
, __p
);
12175 __extension__
extern __inline int32x4_t
12176 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12177 __arm_vshllbq_m_n_s16 (int32x4_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
12179 return __builtin_mve_vshllbq_m_n_sv8hi (__inactive
, __a
, __imm
, __p
);
12182 __extension__
extern __inline uint16x8_t
12183 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12184 __arm_vshllbq_m_n_u8 (uint16x8_t __inactive
, uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
12186 return __builtin_mve_vshllbq_m_n_uv16qi (__inactive
, __a
, __imm
, __p
);
12189 __extension__
extern __inline uint32x4_t
12190 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12191 __arm_vshllbq_m_n_u16 (uint32x4_t __inactive
, uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
12193 return __builtin_mve_vshllbq_m_n_uv8hi (__inactive
, __a
, __imm
, __p
);
12196 __extension__
extern __inline int16x8_t
12197 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12198 __arm_vshlltq_m_n_s8 (int16x8_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
12200 return __builtin_mve_vshlltq_m_n_sv16qi (__inactive
, __a
, __imm
, __p
);
12203 __extension__
extern __inline int32x4_t
12204 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12205 __arm_vshlltq_m_n_s16 (int32x4_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
12207 return __builtin_mve_vshlltq_m_n_sv8hi (__inactive
, __a
, __imm
, __p
);
12210 __extension__
extern __inline uint16x8_t
12211 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12212 __arm_vshlltq_m_n_u8 (uint16x8_t __inactive
, uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
12214 return __builtin_mve_vshlltq_m_n_uv16qi (__inactive
, __a
, __imm
, __p
);
12217 __extension__
extern __inline uint32x4_t
12218 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12219 __arm_vshlltq_m_n_u16 (uint32x4_t __inactive
, uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
12221 return __builtin_mve_vshlltq_m_n_uv8hi (__inactive
, __a
, __imm
, __p
);
12224 __extension__
extern __inline int16x8_t
12225 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12226 __arm_vshrnbq_m_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12228 return __builtin_mve_vshrnbq_m_n_sv4si (__a
, __b
, __imm
, __p
);
12231 __extension__
extern __inline int8x16_t
12232 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12233 __arm_vshrnbq_m_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12235 return __builtin_mve_vshrnbq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
12238 __extension__
extern __inline uint16x8_t
12239 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12240 __arm_vshrnbq_m_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12242 return __builtin_mve_vshrnbq_m_n_uv4si (__a
, __b
, __imm
, __p
);
12245 __extension__
extern __inline uint8x16_t
12246 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12247 __arm_vshrnbq_m_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12249 return __builtin_mve_vshrnbq_m_n_uv8hi (__a
, __b
, __imm
, __p
);
12252 __extension__
extern __inline int16x8_t
12253 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12254 __arm_vshrntq_m_n_s32 (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12256 return __builtin_mve_vshrntq_m_n_sv4si (__a
, __b
, __imm
, __p
);
12259 __extension__
extern __inline int8x16_t
12260 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12261 __arm_vshrntq_m_n_s16 (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12263 return __builtin_mve_vshrntq_m_n_sv8hi (__a
, __b
, __imm
, __p
);
12266 __extension__
extern __inline uint16x8_t
12267 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12268 __arm_vshrntq_m_n_u32 (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
12270 return __builtin_mve_vshrntq_m_n_uv4si (__a
, __b
, __imm
, __p
);
12273 __extension__
extern __inline uint8x16_t
12274 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12275 __arm_vshrntq_m_n_u16 (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
12277 return __builtin_mve_vshrntq_m_n_uv8hi (__a
, __b
, __imm
, __p
);
12280 __extension__
extern __inline
void
12281 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12282 __arm_vstrbq_scatter_offset_s8 (int8_t * __base
, uint8x16_t __offset
, int8x16_t __value
)
12284 __builtin_mve_vstrbq_scatter_offset_sv16qi ((__builtin_neon_qi
*) __base
, __offset
, __value
);
12287 __extension__
extern __inline
void
12288 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12289 __arm_vstrbq_scatter_offset_s32 (int8_t * __base
, uint32x4_t __offset
, int32x4_t __value
)
12291 __builtin_mve_vstrbq_scatter_offset_sv4si ((__builtin_neon_qi
*) __base
, __offset
, __value
);
12294 __extension__
extern __inline
void
12295 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12296 __arm_vstrbq_scatter_offset_s16 (int8_t * __base
, uint16x8_t __offset
, int16x8_t __value
)
12298 __builtin_mve_vstrbq_scatter_offset_sv8hi ((__builtin_neon_qi
*) __base
, __offset
, __value
);
12301 __extension__
extern __inline
void
12302 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12303 __arm_vstrbq_scatter_offset_u8 (uint8_t * __base
, uint8x16_t __offset
, uint8x16_t __value
)
12305 __builtin_mve_vstrbq_scatter_offset_uv16qi ((__builtin_neon_qi
*) __base
, __offset
, __value
);
12308 __extension__
extern __inline
void
12309 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12310 __arm_vstrbq_scatter_offset_u32 (uint8_t * __base
, uint32x4_t __offset
, uint32x4_t __value
)
12312 __builtin_mve_vstrbq_scatter_offset_uv4si ((__builtin_neon_qi
*) __base
, __offset
, __value
);
12315 __extension__
extern __inline
void
12316 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12317 __arm_vstrbq_scatter_offset_u16 (uint8_t * __base
, uint16x8_t __offset
, uint16x8_t __value
)
12319 __builtin_mve_vstrbq_scatter_offset_uv8hi ((__builtin_neon_qi
*) __base
, __offset
, __value
);
12322 __extension__
extern __inline
void
12323 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12324 __arm_vstrbq_s8 (int8_t * __addr
, int8x16_t __value
)
12326 __builtin_mve_vstrbq_sv16qi ((__builtin_neon_qi
*) __addr
, __value
);
12329 __extension__
extern __inline
void
12330 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12331 __arm_vstrbq_s32 (int8_t * __addr
, int32x4_t __value
)
12333 __builtin_mve_vstrbq_sv4si ((__builtin_neon_qi
*) __addr
, __value
);
12336 __extension__
extern __inline
void
12337 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12338 __arm_vstrbq_s16 (int8_t * __addr
, int16x8_t __value
)
12340 __builtin_mve_vstrbq_sv8hi ((__builtin_neon_qi
*) __addr
, __value
);
12343 __extension__
extern __inline
void
12344 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12345 __arm_vstrbq_u8 (uint8_t * __addr
, uint8x16_t __value
)
12347 __builtin_mve_vstrbq_uv16qi ((__builtin_neon_qi
*) __addr
, __value
);
12350 __extension__
extern __inline
void
12351 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12352 __arm_vstrbq_u32 (uint8_t * __addr
, uint32x4_t __value
)
12354 __builtin_mve_vstrbq_uv4si ((__builtin_neon_qi
*) __addr
, __value
);
12357 __extension__
extern __inline
void
12358 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12359 __arm_vstrbq_u16 (uint8_t * __addr
, uint16x8_t __value
)
12361 __builtin_mve_vstrbq_uv8hi ((__builtin_neon_qi
*) __addr
, __value
);
12364 __extension__
extern __inline
void
12365 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12366 __arm_vstrwq_scatter_base_s32 (uint32x4_t __addr
, const int __offset
, int32x4_t __value
)
12368 __builtin_mve_vstrwq_scatter_base_sv4si (__addr
, __offset
, __value
);
12371 __extension__
extern __inline
void
12372 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12373 __arm_vstrwq_scatter_base_u32 (uint32x4_t __addr
, const int __offset
, uint32x4_t __value
)
12375 __builtin_mve_vstrwq_scatter_base_uv4si (__addr
, __offset
, __value
);
12378 __extension__
extern __inline uint8x16_t
12379 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12380 __arm_vldrbq_gather_offset_u8 (uint8_t const * __base
, uint8x16_t __offset
)
12382 return __builtin_mve_vldrbq_gather_offset_uv16qi ((__builtin_neon_qi
*) __base
, __offset
);
12385 __extension__
extern __inline int8x16_t
12386 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12387 __arm_vldrbq_gather_offset_s8 (int8_t const * __base
, uint8x16_t __offset
)
12389 return __builtin_mve_vldrbq_gather_offset_sv16qi ((__builtin_neon_qi
*) __base
, __offset
);
12392 __extension__
extern __inline int8x16_t
12393 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12394 __arm_vldrbq_s8 (int8_t const * __base
)
12396 return __builtin_mve_vldrbq_sv16qi ((__builtin_neon_qi
*) __base
);
12399 __extension__
extern __inline uint8x16_t
12400 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12401 __arm_vldrbq_u8 (uint8_t const * __base
)
12403 return __builtin_mve_vldrbq_uv16qi ((__builtin_neon_qi
*) __base
);
12406 __extension__
extern __inline uint16x8_t
12407 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12408 __arm_vldrbq_gather_offset_u16 (uint8_t const * __base
, uint16x8_t __offset
)
12410 return __builtin_mve_vldrbq_gather_offset_uv8hi ((__builtin_neon_qi
*) __base
, __offset
);
12413 __extension__
extern __inline int16x8_t
12414 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12415 __arm_vldrbq_gather_offset_s16 (int8_t const * __base
, uint16x8_t __offset
)
12417 return __builtin_mve_vldrbq_gather_offset_sv8hi ((__builtin_neon_qi
*) __base
, __offset
);
12420 __extension__
extern __inline int16x8_t
12421 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12422 __arm_vldrbq_s16 (int8_t const * __base
)
12424 return __builtin_mve_vldrbq_sv8hi ((__builtin_neon_qi
*) __base
);
12427 __extension__
extern __inline uint16x8_t
12428 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12429 __arm_vldrbq_u16 (uint8_t const * __base
)
12431 return __builtin_mve_vldrbq_uv8hi ((__builtin_neon_qi
*) __base
);
12434 __extension__
extern __inline uint32x4_t
12435 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12436 __arm_vldrbq_gather_offset_u32 (uint8_t const * __base
, uint32x4_t __offset
)
12438 return __builtin_mve_vldrbq_gather_offset_uv4si ((__builtin_neon_qi
*) __base
, __offset
);
12441 __extension__
extern __inline int32x4_t
12442 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12443 __arm_vldrbq_gather_offset_s32 (int8_t const * __base
, uint32x4_t __offset
)
12445 return __builtin_mve_vldrbq_gather_offset_sv4si ((__builtin_neon_qi
*) __base
, __offset
);
12448 __extension__
extern __inline int32x4_t
12449 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12450 __arm_vldrbq_s32 (int8_t const * __base
)
12452 return __builtin_mve_vldrbq_sv4si ((__builtin_neon_qi
*) __base
);
12455 __extension__
extern __inline uint32x4_t
12456 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12457 __arm_vldrbq_u32 (uint8_t const * __base
)
12459 return __builtin_mve_vldrbq_uv4si ((__builtin_neon_qi
*) __base
);
12462 __extension__
extern __inline int32x4_t
12463 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12464 __arm_vldrwq_gather_base_s32 (uint32x4_t __addr
, const int __offset
)
12466 return __builtin_mve_vldrwq_gather_base_sv4si (__addr
, __offset
);
12469 __extension__
extern __inline uint32x4_t
12470 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12471 __arm_vldrwq_gather_base_u32 (uint32x4_t __addr
, const int __offset
)
12473 return __builtin_mve_vldrwq_gather_base_uv4si (__addr
, __offset
);
12476 __extension__
extern __inline
void
12477 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12478 __arm_vstrbq_p_s8 (int8_t * __addr
, int8x16_t __value
, mve_pred16_t __p
)
12480 __builtin_mve_vstrbq_p_sv16qi ((__builtin_neon_qi
*) __addr
, __value
, __p
);
12483 __extension__
extern __inline
void
12484 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12485 __arm_vstrbq_p_s32 (int8_t * __addr
, int32x4_t __value
, mve_pred16_t __p
)
12487 __builtin_mve_vstrbq_p_sv4si ((__builtin_neon_qi
*) __addr
, __value
, __p
);
12490 __extension__
extern __inline
void
12491 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12492 __arm_vstrbq_p_s16 (int8_t * __addr
, int16x8_t __value
, mve_pred16_t __p
)
12494 __builtin_mve_vstrbq_p_sv8hi ((__builtin_neon_qi
*) __addr
, __value
, __p
);
12497 __extension__
extern __inline
void
12498 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12499 __arm_vstrbq_p_u8 (uint8_t * __addr
, uint8x16_t __value
, mve_pred16_t __p
)
12501 __builtin_mve_vstrbq_p_uv16qi ((__builtin_neon_qi
*) __addr
, __value
, __p
);
12504 __extension__
extern __inline
void
12505 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12506 __arm_vstrbq_p_u32 (uint8_t * __addr
, uint32x4_t __value
, mve_pred16_t __p
)
12508 __builtin_mve_vstrbq_p_uv4si ((__builtin_neon_qi
*) __addr
, __value
, __p
);
12511 __extension__
extern __inline
void
12512 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12513 __arm_vstrbq_p_u16 (uint8_t * __addr
, uint16x8_t __value
, mve_pred16_t __p
)
12515 __builtin_mve_vstrbq_p_uv8hi ((__builtin_neon_qi
*) __addr
, __value
, __p
);
12518 __extension__
extern __inline
void
12519 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12520 __arm_vstrbq_scatter_offset_p_s8 (int8_t * __base
, uint8x16_t __offset
, int8x16_t __value
, mve_pred16_t __p
)
12522 __builtin_mve_vstrbq_scatter_offset_p_sv16qi ((__builtin_neon_qi
*) __base
, __offset
, __value
, __p
);
12525 __extension__
extern __inline
void
12526 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12527 __arm_vstrbq_scatter_offset_p_s32 (int8_t * __base
, uint32x4_t __offset
, int32x4_t __value
, mve_pred16_t __p
)
12529 __builtin_mve_vstrbq_scatter_offset_p_sv4si ((__builtin_neon_qi
*) __base
, __offset
, __value
, __p
);
12532 __extension__
extern __inline
void
12533 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12534 __arm_vstrbq_scatter_offset_p_s16 (int8_t * __base
, uint16x8_t __offset
, int16x8_t __value
, mve_pred16_t __p
)
12536 __builtin_mve_vstrbq_scatter_offset_p_sv8hi ((__builtin_neon_qi
*) __base
, __offset
, __value
, __p
);
12539 __extension__
extern __inline
void
12540 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12541 __arm_vstrbq_scatter_offset_p_u8 (uint8_t * __base
, uint8x16_t __offset
, uint8x16_t __value
, mve_pred16_t __p
)
12543 __builtin_mve_vstrbq_scatter_offset_p_uv16qi ((__builtin_neon_qi
*) __base
, __offset
, __value
, __p
);
12546 __extension__
extern __inline
void
12547 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12548 __arm_vstrbq_scatter_offset_p_u32 (uint8_t * __base
, uint32x4_t __offset
, uint32x4_t __value
, mve_pred16_t __p
)
12550 __builtin_mve_vstrbq_scatter_offset_p_uv4si ((__builtin_neon_qi
*) __base
, __offset
, __value
, __p
);
12553 __extension__
extern __inline
void
12554 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12555 __arm_vstrbq_scatter_offset_p_u16 (uint8_t * __base
, uint16x8_t __offset
, uint16x8_t __value
, mve_pred16_t __p
)
12557 __builtin_mve_vstrbq_scatter_offset_p_uv8hi ((__builtin_neon_qi
*) __base
, __offset
, __value
, __p
);
12560 __extension__
extern __inline
void
12561 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12562 __arm_vstrwq_scatter_base_p_s32 (uint32x4_t __addr
, const int __offset
, int32x4_t __value
, mve_pred16_t __p
)
12564 __builtin_mve_vstrwq_scatter_base_p_sv4si (__addr
, __offset
, __value
, __p
);
12567 __extension__
extern __inline
void
12568 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12569 __arm_vstrwq_scatter_base_p_u32 (uint32x4_t __addr
, const int __offset
, uint32x4_t __value
, mve_pred16_t __p
)
12571 __builtin_mve_vstrwq_scatter_base_p_uv4si (__addr
, __offset
, __value
, __p
);
12574 __extension__
extern __inline int8x16_t
12575 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12576 __arm_vldrbq_gather_offset_z_s8 (int8_t const * __base
, uint8x16_t __offset
, mve_pred16_t __p
)
12578 return __builtin_mve_vldrbq_gather_offset_z_sv16qi ((__builtin_neon_qi
*) __base
, __offset
, __p
);
12581 __extension__
extern __inline int32x4_t
12582 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12583 __arm_vldrbq_gather_offset_z_s32 (int8_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
12585 return __builtin_mve_vldrbq_gather_offset_z_sv4si ((__builtin_neon_qi
*) __base
, __offset
, __p
);
12588 __extension__
extern __inline int16x8_t
12589 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12590 __arm_vldrbq_gather_offset_z_s16 (int8_t const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
12592 return __builtin_mve_vldrbq_gather_offset_z_sv8hi ((__builtin_neon_qi
*) __base
, __offset
, __p
);
12595 __extension__
extern __inline uint8x16_t
12596 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12597 __arm_vldrbq_gather_offset_z_u8 (uint8_t const * __base
, uint8x16_t __offset
, mve_pred16_t __p
)
12599 return __builtin_mve_vldrbq_gather_offset_z_uv16qi ((__builtin_neon_qi
*) __base
, __offset
, __p
);
12602 __extension__
extern __inline uint32x4_t
12603 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12604 __arm_vldrbq_gather_offset_z_u32 (uint8_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
12606 return __builtin_mve_vldrbq_gather_offset_z_uv4si ((__builtin_neon_qi
*) __base
, __offset
, __p
);
12609 __extension__
extern __inline uint16x8_t
12610 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12611 __arm_vldrbq_gather_offset_z_u16 (uint8_t const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
12613 return __builtin_mve_vldrbq_gather_offset_z_uv8hi ((__builtin_neon_qi
*) __base
, __offset
, __p
);
12616 __extension__
extern __inline int8x16_t
12617 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12618 __arm_vldrbq_z_s8 (int8_t const * __base
, mve_pred16_t __p
)
12620 return __builtin_mve_vldrbq_z_sv16qi ((__builtin_neon_qi
*) __base
, __p
);
12623 __extension__
extern __inline int32x4_t
12624 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12625 __arm_vldrbq_z_s32 (int8_t const * __base
, mve_pred16_t __p
)
12627 return __builtin_mve_vldrbq_z_sv4si ((__builtin_neon_qi
*) __base
, __p
);
12630 __extension__
extern __inline int16x8_t
12631 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12632 __arm_vldrbq_z_s16 (int8_t const * __base
, mve_pred16_t __p
)
12634 return __builtin_mve_vldrbq_z_sv8hi ((__builtin_neon_qi
*) __base
, __p
);
12637 __extension__
extern __inline uint8x16_t
12638 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12639 __arm_vldrbq_z_u8 (uint8_t const * __base
, mve_pred16_t __p
)
12641 return __builtin_mve_vldrbq_z_uv16qi ((__builtin_neon_qi
*) __base
, __p
);
12644 __extension__
extern __inline uint32x4_t
12645 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12646 __arm_vldrbq_z_u32 (uint8_t const * __base
, mve_pred16_t __p
)
12648 return __builtin_mve_vldrbq_z_uv4si ((__builtin_neon_qi
*) __base
, __p
);
12651 __extension__
extern __inline uint16x8_t
12652 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12653 __arm_vldrbq_z_u16 (uint8_t const * __base
, mve_pred16_t __p
)
12655 return __builtin_mve_vldrbq_z_uv8hi ((__builtin_neon_qi
*) __base
, __p
);
12658 __extension__
extern __inline int32x4_t
12659 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12660 __arm_vldrwq_gather_base_z_s32 (uint32x4_t __addr
, const int __offset
, mve_pred16_t __p
)
12662 return __builtin_mve_vldrwq_gather_base_z_sv4si (__addr
, __offset
, __p
);
12665 __extension__
extern __inline uint32x4_t
12666 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12667 __arm_vldrwq_gather_base_z_u32 (uint32x4_t __addr
, const int __offset
, mve_pred16_t __p
)
12669 return __builtin_mve_vldrwq_gather_base_z_uv4si (__addr
, __offset
, __p
);
12672 __extension__
extern __inline int8x16_t
12673 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12674 __arm_vld1q_s8 (int8_t const * __base
)
12676 return __builtin_mve_vld1q_sv16qi ((__builtin_neon_qi
*) __base
);
12679 __extension__
extern __inline int32x4_t
12680 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12681 __arm_vld1q_s32 (int32_t const * __base
)
12683 return __builtin_mve_vld1q_sv4si ((__builtin_neon_si
*) __base
);
12686 __extension__
extern __inline int16x8_t
12687 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12688 __arm_vld1q_s16 (int16_t const * __base
)
12690 return __builtin_mve_vld1q_sv8hi ((__builtin_neon_hi
*) __base
);
12693 __extension__
extern __inline uint8x16_t
12694 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12695 __arm_vld1q_u8 (uint8_t const * __base
)
12697 return __builtin_mve_vld1q_uv16qi ((__builtin_neon_qi
*) __base
);
12700 __extension__
extern __inline uint32x4_t
12701 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12702 __arm_vld1q_u32 (uint32_t const * __base
)
12704 return __builtin_mve_vld1q_uv4si ((__builtin_neon_si
*) __base
);
12707 __extension__
extern __inline uint16x8_t
12708 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12709 __arm_vld1q_u16 (uint16_t const * __base
)
12711 return __builtin_mve_vld1q_uv8hi ((__builtin_neon_hi
*) __base
);
12714 __extension__
extern __inline int32x4_t
12715 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12716 __arm_vldrhq_gather_offset_s32 (int16_t const * __base
, uint32x4_t __offset
)
12718 return __builtin_mve_vldrhq_gather_offset_sv4si ((__builtin_neon_hi
*) __base
, __offset
);
12721 __extension__
extern __inline int16x8_t
12722 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12723 __arm_vldrhq_gather_offset_s16 (int16_t const * __base
, uint16x8_t __offset
)
12725 return __builtin_mve_vldrhq_gather_offset_sv8hi ((__builtin_neon_hi
*) __base
, __offset
);
12728 __extension__
extern __inline uint32x4_t
12729 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12730 __arm_vldrhq_gather_offset_u32 (uint16_t const * __base
, uint32x4_t __offset
)
12732 return __builtin_mve_vldrhq_gather_offset_uv4si ((__builtin_neon_hi
*) __base
, __offset
);
12735 __extension__
extern __inline uint16x8_t
12736 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12737 __arm_vldrhq_gather_offset_u16 (uint16_t const * __base
, uint16x8_t __offset
)
12739 return __builtin_mve_vldrhq_gather_offset_uv8hi ((__builtin_neon_hi
*) __base
, __offset
);
12742 __extension__
extern __inline int32x4_t
12743 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12744 __arm_vldrhq_gather_offset_z_s32 (int16_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
12746 return __builtin_mve_vldrhq_gather_offset_z_sv4si ((__builtin_neon_hi
*) __base
, __offset
, __p
);
12749 __extension__
extern __inline int16x8_t
12750 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12751 __arm_vldrhq_gather_offset_z_s16 (int16_t const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
12753 return __builtin_mve_vldrhq_gather_offset_z_sv8hi ((__builtin_neon_hi
*) __base
, __offset
, __p
);
12756 __extension__
extern __inline uint32x4_t
12757 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12758 __arm_vldrhq_gather_offset_z_u32 (uint16_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
12760 return __builtin_mve_vldrhq_gather_offset_z_uv4si ((__builtin_neon_hi
*) __base
, __offset
, __p
);
12763 __extension__
extern __inline uint16x8_t
12764 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12765 __arm_vldrhq_gather_offset_z_u16 (uint16_t const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
12767 return __builtin_mve_vldrhq_gather_offset_z_uv8hi ((__builtin_neon_hi
*) __base
, __offset
, __p
);
12770 __extension__
extern __inline int32x4_t
12771 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12772 __arm_vldrhq_gather_shifted_offset_s32 (int16_t const * __base
, uint32x4_t __offset
)
12774 return __builtin_mve_vldrhq_gather_shifted_offset_sv4si ((__builtin_neon_hi
*) __base
, __offset
);
12777 __extension__
extern __inline int16x8_t
12778 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12779 __arm_vldrhq_gather_shifted_offset_s16 (int16_t const * __base
, uint16x8_t __offset
)
12781 return __builtin_mve_vldrhq_gather_shifted_offset_sv8hi ((__builtin_neon_hi
*) __base
, __offset
);
12784 __extension__
extern __inline uint32x4_t
12785 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12786 __arm_vldrhq_gather_shifted_offset_u32 (uint16_t const * __base
, uint32x4_t __offset
)
12788 return __builtin_mve_vldrhq_gather_shifted_offset_uv4si ((__builtin_neon_hi
*) __base
, __offset
);
12791 __extension__
extern __inline uint16x8_t
12792 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12793 __arm_vldrhq_gather_shifted_offset_u16 (uint16_t const * __base
, uint16x8_t __offset
)
12795 return __builtin_mve_vldrhq_gather_shifted_offset_uv8hi ((__builtin_neon_hi
*) __base
, __offset
);
12798 __extension__
extern __inline int32x4_t
12799 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12800 __arm_vldrhq_gather_shifted_offset_z_s32 (int16_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
12802 return __builtin_mve_vldrhq_gather_shifted_offset_z_sv4si ((__builtin_neon_hi
*) __base
, __offset
, __p
);
12805 __extension__
extern __inline int16x8_t
12806 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12807 __arm_vldrhq_gather_shifted_offset_z_s16 (int16_t const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
12809 return __builtin_mve_vldrhq_gather_shifted_offset_z_sv8hi ((__builtin_neon_hi
*) __base
, __offset
, __p
);
12812 __extension__
extern __inline uint32x4_t
12813 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12814 __arm_vldrhq_gather_shifted_offset_z_u32 (uint16_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
12816 return __builtin_mve_vldrhq_gather_shifted_offset_z_uv4si ((__builtin_neon_hi
*) __base
, __offset
, __p
);
12819 __extension__
extern __inline uint16x8_t
12820 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12821 __arm_vldrhq_gather_shifted_offset_z_u16 (uint16_t const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
12823 return __builtin_mve_vldrhq_gather_shifted_offset_z_uv8hi ((__builtin_neon_hi
*) __base
, __offset
, __p
);
12826 __extension__
extern __inline int32x4_t
12827 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12828 __arm_vldrhq_s32 (int16_t const * __base
)
12830 return __builtin_mve_vldrhq_sv4si ((__builtin_neon_hi
*) __base
);
12833 __extension__
extern __inline int16x8_t
12834 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12835 __arm_vldrhq_s16 (int16_t const * __base
)
12837 return __builtin_mve_vldrhq_sv8hi ((__builtin_neon_hi
*) __base
);
12840 __extension__
extern __inline uint32x4_t
12841 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12842 __arm_vldrhq_u32 (uint16_t const * __base
)
12844 return __builtin_mve_vldrhq_uv4si ((__builtin_neon_hi
*) __base
);
12847 __extension__
extern __inline uint16x8_t
12848 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12849 __arm_vldrhq_u16 (uint16_t const * __base
)
12851 return __builtin_mve_vldrhq_uv8hi ((__builtin_neon_hi
*) __base
);
12854 __extension__
extern __inline int32x4_t
12855 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12856 __arm_vldrhq_z_s32 (int16_t const * __base
, mve_pred16_t __p
)
12858 return __builtin_mve_vldrhq_z_sv4si ((__builtin_neon_hi
*) __base
, __p
);
12861 __extension__
extern __inline int16x8_t
12862 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12863 __arm_vldrhq_z_s16 (int16_t const * __base
, mve_pred16_t __p
)
12865 return __builtin_mve_vldrhq_z_sv8hi ((__builtin_neon_hi
*) __base
, __p
);
12868 __extension__
extern __inline uint32x4_t
12869 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12870 __arm_vldrhq_z_u32 (uint16_t const * __base
, mve_pred16_t __p
)
12872 return __builtin_mve_vldrhq_z_uv4si ((__builtin_neon_hi
*) __base
, __p
);
12875 __extension__
extern __inline uint16x8_t
12876 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12877 __arm_vldrhq_z_u16 (uint16_t const * __base
, mve_pred16_t __p
)
12879 return __builtin_mve_vldrhq_z_uv8hi ((__builtin_neon_hi
*) __base
, __p
);
12882 __extension__
extern __inline int32x4_t
12883 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12884 __arm_vldrwq_s32 (int32_t const * __base
)
12886 return __builtin_mve_vldrwq_sv4si ((__builtin_neon_si
*) __base
);
12889 __extension__
extern __inline uint32x4_t
12890 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12891 __arm_vldrwq_u32 (uint32_t const * __base
)
12893 return __builtin_mve_vldrwq_uv4si ((__builtin_neon_si
*) __base
);
12897 __extension__
extern __inline int32x4_t
12898 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12899 __arm_vldrwq_z_s32 (int32_t const * __base
, mve_pred16_t __p
)
12901 return __builtin_mve_vldrwq_z_sv4si ((__builtin_neon_si
*) __base
, __p
);
12904 __extension__
extern __inline uint32x4_t
12905 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12906 __arm_vldrwq_z_u32 (uint32_t const * __base
, mve_pred16_t __p
)
12908 return __builtin_mve_vldrwq_z_uv4si ((__builtin_neon_si
*) __base
, __p
);
12911 __extension__
extern __inline int64x2_t
12912 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12913 __arm_vldrdq_gather_base_s64 (uint64x2_t __addr
, const int __offset
)
12915 return __builtin_mve_vldrdq_gather_base_sv2di (__addr
, __offset
);
12918 __extension__
extern __inline uint64x2_t
12919 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12920 __arm_vldrdq_gather_base_u64 (uint64x2_t __addr
, const int __offset
)
12922 return __builtin_mve_vldrdq_gather_base_uv2di (__addr
, __offset
);
12925 __extension__
extern __inline int64x2_t
12926 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12927 __arm_vldrdq_gather_base_z_s64 (uint64x2_t __addr
, const int __offset
, mve_pred16_t __p
)
12929 return __builtin_mve_vldrdq_gather_base_z_sv2di (__addr
, __offset
, __p
);
12932 __extension__
extern __inline uint64x2_t
12933 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12934 __arm_vldrdq_gather_base_z_u64 (uint64x2_t __addr
, const int __offset
, mve_pred16_t __p
)
12936 return __builtin_mve_vldrdq_gather_base_z_uv2di (__addr
, __offset
, __p
);
12939 __extension__
extern __inline int64x2_t
12940 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12941 __arm_vldrdq_gather_offset_s64 (int64_t const * __base
, uint64x2_t __offset
)
12943 return __builtin_mve_vldrdq_gather_offset_sv2di ((__builtin_neon_di
*) __base
, __offset
);
12946 __extension__
extern __inline uint64x2_t
12947 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12948 __arm_vldrdq_gather_offset_u64 (uint64_t const * __base
, uint64x2_t __offset
)
12950 return __builtin_mve_vldrdq_gather_offset_uv2di ((__builtin_neon_di
*) __base
, __offset
);
12953 __extension__
extern __inline int64x2_t
12954 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12955 __arm_vldrdq_gather_offset_z_s64 (int64_t const * __base
, uint64x2_t __offset
, mve_pred16_t __p
)
12957 return __builtin_mve_vldrdq_gather_offset_z_sv2di ((__builtin_neon_di
*) __base
, __offset
, __p
);
12961 __extension__
extern __inline uint64x2_t
12962 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12963 __arm_vldrdq_gather_offset_z_u64 (uint64_t const * __base
, uint64x2_t __offset
, mve_pred16_t __p
)
12965 return __builtin_mve_vldrdq_gather_offset_z_uv2di ((__builtin_neon_di
*) __base
, __offset
, __p
);
12968 __extension__
extern __inline int64x2_t
12969 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12970 __arm_vldrdq_gather_shifted_offset_s64 (int64_t const * __base
, uint64x2_t __offset
)
12972 return __builtin_mve_vldrdq_gather_shifted_offset_sv2di ((__builtin_neon_di
*) __base
, __offset
);
12975 __extension__
extern __inline uint64x2_t
12976 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12977 __arm_vldrdq_gather_shifted_offset_u64 (uint64_t const * __base
, uint64x2_t __offset
)
12979 return __builtin_mve_vldrdq_gather_shifted_offset_uv2di ((__builtin_neon_di
*) __base
, __offset
);
12982 __extension__
extern __inline int64x2_t
12983 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12984 __arm_vldrdq_gather_shifted_offset_z_s64 (int64_t const * __base
, uint64x2_t __offset
, mve_pred16_t __p
)
12986 return __builtin_mve_vldrdq_gather_shifted_offset_z_sv2di ((__builtin_neon_di
*) __base
, __offset
, __p
);
12989 __extension__
extern __inline uint64x2_t
12990 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12991 __arm_vldrdq_gather_shifted_offset_z_u64 (uint64_t const * __base
, uint64x2_t __offset
, mve_pred16_t __p
)
12993 return __builtin_mve_vldrdq_gather_shifted_offset_z_uv2di ((__builtin_neon_di
*) __base
, __offset
, __p
);
12996 __extension__
extern __inline int32x4_t
12997 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
12998 __arm_vldrwq_gather_offset_s32 (int32_t const * __base
, uint32x4_t __offset
)
13000 return __builtin_mve_vldrwq_gather_offset_sv4si ((__builtin_neon_si
*) __base
, __offset
);
13003 __extension__
extern __inline uint32x4_t
13004 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13005 __arm_vldrwq_gather_offset_u32 (uint32_t const * __base
, uint32x4_t __offset
)
13007 return __builtin_mve_vldrwq_gather_offset_uv4si ((__builtin_neon_si
*) __base
, __offset
);
13010 __extension__
extern __inline int32x4_t
13011 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13012 __arm_vldrwq_gather_offset_z_s32 (int32_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
13014 return __builtin_mve_vldrwq_gather_offset_z_sv4si ((__builtin_neon_si
*) __base
, __offset
, __p
);
13017 __extension__
extern __inline uint32x4_t
13018 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13019 __arm_vldrwq_gather_offset_z_u32 (uint32_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
13021 return __builtin_mve_vldrwq_gather_offset_z_uv4si ((__builtin_neon_si
*) __base
, __offset
, __p
);
13024 __extension__
extern __inline int32x4_t
13025 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13026 __arm_vldrwq_gather_shifted_offset_s32 (int32_t const * __base
, uint32x4_t __offset
)
13028 return __builtin_mve_vldrwq_gather_shifted_offset_sv4si ((__builtin_neon_si
*) __base
, __offset
);
13031 __extension__
extern __inline uint32x4_t
13032 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13033 __arm_vldrwq_gather_shifted_offset_u32 (uint32_t const * __base
, uint32x4_t __offset
)
13035 return __builtin_mve_vldrwq_gather_shifted_offset_uv4si ((__builtin_neon_si
*) __base
, __offset
);
13038 __extension__
extern __inline int32x4_t
13039 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13040 __arm_vldrwq_gather_shifted_offset_z_s32 (int32_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
13042 return __builtin_mve_vldrwq_gather_shifted_offset_z_sv4si ((__builtin_neon_si
*) __base
, __offset
, __p
);
13045 __extension__
extern __inline uint32x4_t
13046 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13047 __arm_vldrwq_gather_shifted_offset_z_u32 (uint32_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
13049 return __builtin_mve_vldrwq_gather_shifted_offset_z_uv4si ((__builtin_neon_si
*) __base
, __offset
, __p
);
13052 __extension__
extern __inline
void
13053 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13054 __arm_vst1q_s8 (int8_t * __addr
, int8x16_t __value
)
13056 __builtin_mve_vst1q_sv16qi ((__builtin_neon_qi
*) __addr
, __value
);
13059 __extension__
extern __inline
void
13060 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13061 __arm_vst1q_s32 (int32_t * __addr
, int32x4_t __value
)
13063 __builtin_mve_vst1q_sv4si ((__builtin_neon_si
*) __addr
, __value
);
13066 __extension__
extern __inline
void
13067 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13068 __arm_vst1q_s16 (int16_t * __addr
, int16x8_t __value
)
13070 __builtin_mve_vst1q_sv8hi ((__builtin_neon_hi
*) __addr
, __value
);
13073 __extension__
extern __inline
void
13074 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13075 __arm_vst1q_u8 (uint8_t * __addr
, uint8x16_t __value
)
13077 __builtin_mve_vst1q_uv16qi ((__builtin_neon_qi
*) __addr
, __value
);
13080 __extension__
extern __inline
void
13081 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13082 __arm_vst1q_u32 (uint32_t * __addr
, uint32x4_t __value
)
13084 __builtin_mve_vst1q_uv4si ((__builtin_neon_si
*) __addr
, __value
);
13087 __extension__
extern __inline
void
13088 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13089 __arm_vst1q_u16 (uint16_t * __addr
, uint16x8_t __value
)
13091 __builtin_mve_vst1q_uv8hi ((__builtin_neon_hi
*) __addr
, __value
);
13094 __extension__
extern __inline
void
13095 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13096 __arm_vstrhq_scatter_offset_s32 (int16_t * __base
, uint32x4_t __offset
, int32x4_t __value
)
13098 __builtin_mve_vstrhq_scatter_offset_sv4si ((__builtin_neon_hi
*) __base
, __offset
, __value
);
13101 __extension__
extern __inline
void
13102 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13103 __arm_vstrhq_scatter_offset_s16 (int16_t * __base
, uint16x8_t __offset
, int16x8_t __value
)
13105 __builtin_mve_vstrhq_scatter_offset_sv8hi ((__builtin_neon_hi
*) __base
, __offset
, __value
);
13108 __extension__
extern __inline
void
13109 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13110 __arm_vstrhq_scatter_offset_u32 (uint16_t * __base
, uint32x4_t __offset
, uint32x4_t __value
)
13112 __builtin_mve_vstrhq_scatter_offset_uv4si ((__builtin_neon_hi
*) __base
, __offset
, __value
);
13115 __extension__
extern __inline
void
13116 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13117 __arm_vstrhq_scatter_offset_u16 (uint16_t * __base
, uint16x8_t __offset
, uint16x8_t __value
)
13119 __builtin_mve_vstrhq_scatter_offset_uv8hi ((__builtin_neon_hi
*) __base
, __offset
, __value
);
13122 __extension__
extern __inline
void
13123 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13124 __arm_vstrhq_scatter_offset_p_s32 (int16_t * __base
, uint32x4_t __offset
, int32x4_t __value
, mve_pred16_t __p
)
13126 __builtin_mve_vstrhq_scatter_offset_p_sv4si ((__builtin_neon_hi
*) __base
, __offset
, __value
, __p
);
13129 __extension__
extern __inline
void
13130 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13131 __arm_vstrhq_scatter_offset_p_s16 (int16_t * __base
, uint16x8_t __offset
, int16x8_t __value
, mve_pred16_t __p
)
13133 __builtin_mve_vstrhq_scatter_offset_p_sv8hi ((__builtin_neon_hi
*) __base
, __offset
, __value
, __p
);
13136 __extension__
extern __inline
void
13137 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13138 __arm_vstrhq_scatter_offset_p_u32 (uint16_t * __base
, uint32x4_t __offset
, uint32x4_t __value
, mve_pred16_t __p
)
13140 __builtin_mve_vstrhq_scatter_offset_p_uv4si ((__builtin_neon_hi
*) __base
, __offset
, __value
, __p
);
13143 __extension__
extern __inline
void
13144 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13145 __arm_vstrhq_scatter_offset_p_u16 (uint16_t * __base
, uint16x8_t __offset
, uint16x8_t __value
, mve_pred16_t __p
)
13147 __builtin_mve_vstrhq_scatter_offset_p_uv8hi ((__builtin_neon_hi
*) __base
, __offset
, __value
, __p
);
13150 __extension__
extern __inline
void
13151 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13152 __arm_vstrhq_scatter_shifted_offset_s32 (int16_t * __base
, uint32x4_t __offset
, int32x4_t __value
)
13154 __builtin_mve_vstrhq_scatter_shifted_offset_sv4si ((__builtin_neon_hi
*) __base
, __offset
, __value
);
13157 __extension__
extern __inline
void
13158 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13159 __arm_vstrhq_scatter_shifted_offset_s16 (int16_t * __base
, uint16x8_t __offset
, int16x8_t __value
)
13161 __builtin_mve_vstrhq_scatter_shifted_offset_sv8hi ((__builtin_neon_hi
*) __base
, __offset
, __value
);
13164 __extension__
extern __inline
void
13165 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13166 __arm_vstrhq_scatter_shifted_offset_u32 (uint16_t * __base
, uint32x4_t __offset
, uint32x4_t __value
)
13168 __builtin_mve_vstrhq_scatter_shifted_offset_uv4si ((__builtin_neon_hi
*) __base
, __offset
, __value
);
13171 __extension__
extern __inline
void
13172 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13173 __arm_vstrhq_scatter_shifted_offset_u16 (uint16_t * __base
, uint16x8_t __offset
, uint16x8_t __value
)
13175 __builtin_mve_vstrhq_scatter_shifted_offset_uv8hi ((__builtin_neon_hi
*) __base
, __offset
, __value
);
13178 __extension__
extern __inline
void
13179 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13180 __arm_vstrhq_scatter_shifted_offset_p_s32 (int16_t * __base
, uint32x4_t __offset
, int32x4_t __value
, mve_pred16_t __p
)
13182 __builtin_mve_vstrhq_scatter_shifted_offset_p_sv4si ((__builtin_neon_hi
*) __base
, __offset
, __value
, __p
);
13185 __extension__
extern __inline
void
13186 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13187 __arm_vstrhq_scatter_shifted_offset_p_s16 (int16_t * __base
, uint16x8_t __offset
, int16x8_t __value
, mve_pred16_t __p
)
13189 __builtin_mve_vstrhq_scatter_shifted_offset_p_sv8hi ((__builtin_neon_hi
*) __base
, __offset
, __value
, __p
);
13192 __extension__
extern __inline
void
13193 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13194 __arm_vstrhq_scatter_shifted_offset_p_u32 (uint16_t * __base
, uint32x4_t __offset
, uint32x4_t __value
, mve_pred16_t __p
)
13196 __builtin_mve_vstrhq_scatter_shifted_offset_p_uv4si ((__builtin_neon_hi
*) __base
, __offset
, __value
, __p
);
13199 __extension__
extern __inline
void
13200 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13201 __arm_vstrhq_scatter_shifted_offset_p_u16 (uint16_t * __base
, uint16x8_t __offset
, uint16x8_t __value
, mve_pred16_t __p
)
13203 __builtin_mve_vstrhq_scatter_shifted_offset_p_uv8hi ((__builtin_neon_hi
*) __base
, __offset
, __value
, __p
);
13206 __extension__
extern __inline
void
13207 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13208 __arm_vstrhq_s32 (int16_t * __addr
, int32x4_t __value
)
13210 __builtin_mve_vstrhq_sv4si ((__builtin_neon_hi
*) __addr
, __value
);
13213 __extension__
extern __inline
void
13214 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13215 __arm_vstrhq_s16 (int16_t * __addr
, int16x8_t __value
)
13217 __builtin_mve_vstrhq_sv8hi ((__builtin_neon_hi
*) __addr
, __value
);
13220 __extension__
extern __inline
void
13221 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13222 __arm_vstrhq_u32 (uint16_t * __addr
, uint32x4_t __value
)
13224 __builtin_mve_vstrhq_uv4si ((__builtin_neon_hi
*) __addr
, __value
);
13227 __extension__
extern __inline
void
13228 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13229 __arm_vstrhq_u16 (uint16_t * __addr
, uint16x8_t __value
)
13231 __builtin_mve_vstrhq_uv8hi ((__builtin_neon_hi
*) __addr
, __value
);
13234 __extension__
extern __inline
void
13235 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13236 __arm_vstrhq_p_s32 (int16_t * __addr
, int32x4_t __value
, mve_pred16_t __p
)
13238 __builtin_mve_vstrhq_p_sv4si ((__builtin_neon_hi
*) __addr
, __value
, __p
);
13241 __extension__
extern __inline
void
13242 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13243 __arm_vstrhq_p_s16 (int16_t * __addr
, int16x8_t __value
, mve_pred16_t __p
)
13245 __builtin_mve_vstrhq_p_sv8hi ((__builtin_neon_hi
*) __addr
, __value
, __p
);
13248 __extension__
extern __inline
void
13249 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13250 __arm_vstrhq_p_u32 (uint16_t * __addr
, uint32x4_t __value
, mve_pred16_t __p
)
13252 __builtin_mve_vstrhq_p_uv4si ((__builtin_neon_hi
*) __addr
, __value
, __p
);
13255 __extension__
extern __inline
void
13256 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13257 __arm_vstrhq_p_u16 (uint16_t * __addr
, uint16x8_t __value
, mve_pred16_t __p
)
13259 __builtin_mve_vstrhq_p_uv8hi ((__builtin_neon_hi
*) __addr
, __value
, __p
);
13262 __extension__
extern __inline
void
13263 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13264 __arm_vstrwq_s32 (int32_t * __addr
, int32x4_t __value
)
13266 __builtin_mve_vstrwq_sv4si ((__builtin_neon_si
*) __addr
, __value
);
13269 __extension__
extern __inline
void
13270 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13271 __arm_vstrwq_u32 (uint32_t * __addr
, uint32x4_t __value
)
13273 __builtin_mve_vstrwq_uv4si ((__builtin_neon_si
*) __addr
, __value
);
13276 __extension__
extern __inline
void
13277 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13278 __arm_vstrwq_p_s32 (int32_t * __addr
, int32x4_t __value
, mve_pred16_t __p
)
13280 __builtin_mve_vstrwq_p_sv4si ((__builtin_neon_si
*) __addr
, __value
, __p
);
13283 __extension__
extern __inline
void
13284 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13285 __arm_vstrwq_p_u32 (uint32_t * __addr
, uint32x4_t __value
, mve_pred16_t __p
)
13287 __builtin_mve_vstrwq_p_uv4si ((__builtin_neon_si
*) __addr
, __value
, __p
);
13290 __extension__
extern __inline
void
13291 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13292 __arm_vstrdq_scatter_base_p_s64 (uint64x2_t __addr
, const int __offset
, int64x2_t __value
, mve_pred16_t __p
)
13294 __builtin_mve_vstrdq_scatter_base_p_sv2di (__addr
, __offset
, __value
, __p
);
13297 __extension__
extern __inline
void
13298 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13299 __arm_vstrdq_scatter_base_p_u64 (uint64x2_t __addr
, const int __offset
, uint64x2_t __value
, mve_pred16_t __p
)
13301 __builtin_mve_vstrdq_scatter_base_p_uv2di (__addr
, __offset
, __value
, __p
);
13304 __extension__
extern __inline
void
13305 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13306 __arm_vstrdq_scatter_base_s64 (uint64x2_t __addr
, const int __offset
, int64x2_t __value
)
13308 __builtin_mve_vstrdq_scatter_base_sv2di (__addr
, __offset
, __value
);
13311 __extension__
extern __inline
void
13312 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13313 __arm_vstrdq_scatter_base_u64 (uint64x2_t __addr
, const int __offset
, uint64x2_t __value
)
13315 __builtin_mve_vstrdq_scatter_base_uv2di (__addr
, __offset
, __value
);
13318 __extension__
extern __inline
void
13319 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13320 __arm_vstrdq_scatter_offset_p_s64 (int64_t * __base
, uint64x2_t __offset
, int64x2_t __value
, mve_pred16_t __p
)
13322 __builtin_mve_vstrdq_scatter_offset_p_sv2di ((__builtin_neon_di
*) __base
, __offset
, __value
, __p
);
13325 __extension__
extern __inline
void
13326 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13327 __arm_vstrdq_scatter_offset_p_u64 (uint64_t * __base
, uint64x2_t __offset
, uint64x2_t __value
, mve_pred16_t __p
)
13329 __builtin_mve_vstrdq_scatter_offset_p_uv2di ((__builtin_neon_di
*) __base
, __offset
, __value
, __p
);
13332 __extension__
extern __inline
void
13333 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13334 __arm_vstrdq_scatter_offset_s64 (int64_t * __base
, uint64x2_t __offset
, int64x2_t __value
)
13336 __builtin_mve_vstrdq_scatter_offset_sv2di ((__builtin_neon_di
*) __base
, __offset
, __value
);
13339 __extension__
extern __inline
void
13340 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13341 __arm_vstrdq_scatter_offset_u64 (uint64_t * __base
, uint64x2_t __offset
, uint64x2_t __value
)
13343 __builtin_mve_vstrdq_scatter_offset_uv2di ((__builtin_neon_di
*) __base
, __offset
, __value
);
13346 __extension__
extern __inline
void
13347 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13348 __arm_vstrdq_scatter_shifted_offset_p_s64 (int64_t * __base
, uint64x2_t __offset
, int64x2_t __value
, mve_pred16_t __p
)
13350 __builtin_mve_vstrdq_scatter_shifted_offset_p_sv2di ((__builtin_neon_di
*) __base
, __offset
, __value
, __p
);
13353 __extension__
extern __inline
void
13354 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13355 __arm_vstrdq_scatter_shifted_offset_p_u64 (uint64_t * __base
, uint64x2_t __offset
, uint64x2_t __value
, mve_pred16_t __p
)
13357 __builtin_mve_vstrdq_scatter_shifted_offset_p_uv2di ((__builtin_neon_di
*) __base
, __offset
, __value
, __p
);
13360 __extension__
extern __inline
void
13361 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13362 __arm_vstrdq_scatter_shifted_offset_s64 (int64_t * __base
, uint64x2_t __offset
, int64x2_t __value
)
13364 __builtin_mve_vstrdq_scatter_shifted_offset_sv2di ((__builtin_neon_di
*) __base
, __offset
, __value
);
13367 __extension__
extern __inline
void
13368 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13369 __arm_vstrdq_scatter_shifted_offset_u64 (uint64_t * __base
, uint64x2_t __offset
, uint64x2_t __value
)
13371 __builtin_mve_vstrdq_scatter_shifted_offset_uv2di ((__builtin_neon_di
*) __base
, __offset
, __value
);
13374 __extension__
extern __inline
void
13375 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13376 __arm_vstrwq_scatter_offset_p_s32 (int32_t * __base
, uint32x4_t __offset
, int32x4_t __value
, mve_pred16_t __p
)
13378 __builtin_mve_vstrwq_scatter_offset_p_sv4si ((__builtin_neon_si
*) __base
, __offset
, __value
, __p
);
13381 __extension__
extern __inline
void
13382 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13383 __arm_vstrwq_scatter_offset_p_u32 (uint32_t * __base
, uint32x4_t __offset
, uint32x4_t __value
, mve_pred16_t __p
)
13385 __builtin_mve_vstrwq_scatter_offset_p_uv4si ((__builtin_neon_si
*) __base
, __offset
, __value
, __p
);
13388 __extension__
extern __inline
void
13389 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13390 __arm_vstrwq_scatter_offset_s32 (int32_t * __base
, uint32x4_t __offset
, int32x4_t __value
)
13392 __builtin_mve_vstrwq_scatter_offset_sv4si ((__builtin_neon_si
*) __base
, __offset
, __value
);
13395 __extension__
extern __inline
void
13396 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13397 __arm_vstrwq_scatter_offset_u32 (uint32_t * __base
, uint32x4_t __offset
, uint32x4_t __value
)
13399 __builtin_mve_vstrwq_scatter_offset_uv4si ((__builtin_neon_si
*) __base
, __offset
, __value
);
13402 __extension__
extern __inline
void
13403 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13404 __arm_vstrwq_scatter_shifted_offset_p_s32 (int32_t * __base
, uint32x4_t __offset
, int32x4_t __value
, mve_pred16_t __p
)
13406 __builtin_mve_vstrwq_scatter_shifted_offset_p_sv4si ((__builtin_neon_si
*) __base
, __offset
, __value
, __p
);
13409 __extension__
extern __inline
void
13410 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13411 __arm_vstrwq_scatter_shifted_offset_p_u32 (uint32_t * __base
, uint32x4_t __offset
, uint32x4_t __value
, mve_pred16_t __p
)
13413 __builtin_mve_vstrwq_scatter_shifted_offset_p_uv4si ((__builtin_neon_si
*) __base
, __offset
, __value
, __p
);
13416 __extension__
extern __inline
void
13417 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13418 __arm_vstrwq_scatter_shifted_offset_s32 (int32_t * __base
, uint32x4_t __offset
, int32x4_t __value
)
13420 __builtin_mve_vstrwq_scatter_shifted_offset_sv4si ((__builtin_neon_si
*) __base
, __offset
, __value
);
13423 __extension__
extern __inline
void
13424 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13425 __arm_vstrwq_scatter_shifted_offset_u32 (uint32_t * __base
, uint32x4_t __offset
, uint32x4_t __value
)
13427 __builtin_mve_vstrwq_scatter_shifted_offset_uv4si ((__builtin_neon_si
*) __base
, __offset
, __value
);
13430 __extension__
extern __inline int8x16_t
13431 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13432 __arm_vaddq_s8 (int8x16_t __a
, int8x16_t __b
)
13437 __extension__
extern __inline int16x8_t
13438 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13439 __arm_vaddq_s16 (int16x8_t __a
, int16x8_t __b
)
13444 __extension__
extern __inline int32x4_t
13445 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13446 __arm_vaddq_s32 (int32x4_t __a
, int32x4_t __b
)
13451 __extension__
extern __inline uint8x16_t
13452 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13453 __arm_vaddq_u8 (uint8x16_t __a
, uint8x16_t __b
)
13458 __extension__
extern __inline uint16x8_t
13459 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13460 __arm_vaddq_u16 (uint16x8_t __a
, uint16x8_t __b
)
13465 __extension__
extern __inline uint32x4_t
13466 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13467 __arm_vaddq_u32 (uint32x4_t __a
, uint32x4_t __b
)
13472 __extension__
extern __inline uint8x16_t
13473 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13474 __arm_vddupq_m_n_u8 (uint8x16_t __inactive
, uint32_t __a
, const int __imm
, mve_pred16_t __p
)
13476 return __builtin_mve_vddupq_m_n_uv16qi (__inactive
, __a
, __imm
, __p
);
13479 __extension__
extern __inline uint32x4_t
13480 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13481 __arm_vddupq_m_n_u32 (uint32x4_t __inactive
, uint32_t __a
, const int __imm
, mve_pred16_t __p
)
13483 return __builtin_mve_vddupq_m_n_uv4si (__inactive
, __a
, __imm
, __p
);
13486 __extension__
extern __inline uint16x8_t
13487 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13488 __arm_vddupq_m_n_u16 (uint16x8_t __inactive
, uint32_t __a
, const int __imm
, mve_pred16_t __p
)
13490 return __builtin_mve_vddupq_m_n_uv8hi (__inactive
, __a
, __imm
, __p
);
13493 __extension__
extern __inline uint8x16_t
13494 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13495 __arm_vddupq_m_wb_u8 (uint8x16_t __inactive
, uint32_t * __a
, const int __imm
, mve_pred16_t __p
)
13497 uint8x16_t __res
= __builtin_mve_vddupq_m_n_uv16qi (__inactive
, * __a
, __imm
, __p
);
13498 *__a
-= __imm
* 16u;
13502 __extension__
extern __inline uint16x8_t
13503 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13504 __arm_vddupq_m_wb_u16 (uint16x8_t __inactive
, uint32_t * __a
, const int __imm
, mve_pred16_t __p
)
13506 uint16x8_t __res
= __builtin_mve_vddupq_m_n_uv8hi (__inactive
, *__a
, __imm
, __p
);
13507 *__a
-= __imm
* 8u;
13511 __extension__
extern __inline uint32x4_t
13512 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13513 __arm_vddupq_m_wb_u32 (uint32x4_t __inactive
, uint32_t * __a
, const int __imm
, mve_pred16_t __p
)
13515 uint32x4_t __res
= __builtin_mve_vddupq_m_n_uv4si (__inactive
, *__a
, __imm
, __p
);
13516 *__a
-= __imm
* 4u;
13520 __extension__
extern __inline uint8x16_t
13521 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13522 __arm_vddupq_n_u8 (uint32_t __a
, const int __imm
)
13524 return __builtin_mve_vddupq_n_uv16qi (__a
, __imm
);
13527 __extension__
extern __inline uint32x4_t
13528 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13529 __arm_vddupq_n_u32 (uint32_t __a
, const int __imm
)
13531 return __builtin_mve_vddupq_n_uv4si (__a
, __imm
);
13534 __extension__
extern __inline uint16x8_t
13535 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13536 __arm_vddupq_n_u16 (uint32_t __a
, const int __imm
)
13538 return __builtin_mve_vddupq_n_uv8hi (__a
, __imm
);
13541 __extension__
extern __inline uint8x16_t
13542 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13543 __arm_vdwdupq_m_n_u8 (uint8x16_t __inactive
, uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
13545 uint64_t __c
= ((uint64_t) __b
) << 32;
13546 return __builtin_mve_vdwdupq_m_n_uv16qi (__inactive
, __a
, __c
, __imm
, __p
);
13549 __extension__
extern __inline uint32x4_t
13550 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13551 __arm_vdwdupq_m_n_u32 (uint32x4_t __inactive
, uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
13553 uint64_t __c
= ((uint64_t) __b
) << 32;
13554 return __builtin_mve_vdwdupq_m_n_uv4si (__inactive
, __a
, __c
, __imm
, __p
);
13557 __extension__
extern __inline uint16x8_t
13558 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13559 __arm_vdwdupq_m_n_u16 (uint16x8_t __inactive
, uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
13561 uint64_t __c
= ((uint64_t) __b
) << 32;
13562 return __builtin_mve_vdwdupq_m_n_uv8hi (__inactive
, __a
, __c
, __imm
, __p
);
13565 __extension__
extern __inline uint8x16_t
13566 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13567 __arm_vdwdupq_m_wb_u8 (uint8x16_t __inactive
, uint32_t * __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
13569 uint64_t __c
= ((uint64_t) __b
) << 32;
13570 uint8x16_t __res
= __builtin_mve_vdwdupq_m_n_uv16qi (__inactive
, *__a
, __c
, __imm
, __p
);
13571 *__a
= __builtin_mve_vdwdupq_m_wb_uv16qi (__inactive
, *__a
, __c
, __imm
, __p
);
13575 __extension__
extern __inline uint32x4_t
13576 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13577 __arm_vdwdupq_m_wb_u32 (uint32x4_t __inactive
, uint32_t * __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
13579 uint64_t __c
= ((uint64_t) __b
) << 32;
13580 uint32x4_t __res
= __builtin_mve_vdwdupq_m_n_uv4si (__inactive
, *__a
, __c
, __imm
, __p
);
13581 *__a
= __builtin_mve_vdwdupq_m_wb_uv4si (__inactive
, *__a
, __c
, __imm
, __p
);
13585 __extension__
extern __inline uint16x8_t
13586 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13587 __arm_vdwdupq_m_wb_u16 (uint16x8_t __inactive
, uint32_t * __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
13589 uint64_t __c
= ((uint64_t) __b
) << 32;
13590 uint16x8_t __res
= __builtin_mve_vdwdupq_m_n_uv8hi (__inactive
, *__a
, __c
, __imm
, __p
);
13591 *__a
= __builtin_mve_vdwdupq_m_wb_uv8hi (__inactive
, *__a
, __c
, __imm
, __p
);
13595 __extension__
extern __inline uint8x16_t
13596 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13597 __arm_vdwdupq_n_u8 (uint32_t __a
, uint32_t __b
, const int __imm
)
13599 uint64_t __c
= ((uint64_t) __b
) << 32;
13600 return __builtin_mve_vdwdupq_n_uv16qi (__a
, __c
, __imm
);
13603 __extension__
extern __inline uint32x4_t
13604 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13605 __arm_vdwdupq_n_u32 (uint32_t __a
, uint32_t __b
, const int __imm
)
13607 uint64_t __c
= ((uint64_t) __b
) << 32;
13608 return __builtin_mve_vdwdupq_n_uv4si (__a
, __c
, __imm
);
13611 __extension__
extern __inline uint16x8_t
13612 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13613 __arm_vdwdupq_n_u16 (uint32_t __a
, uint32_t __b
, const int __imm
)
13615 uint64_t __c
= ((uint64_t) __b
) << 32;
13616 return __builtin_mve_vdwdupq_n_uv8hi (__a
, __c
, __imm
);
13619 __extension__
extern __inline uint8x16_t
13620 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13621 __arm_vdwdupq_wb_u8 (uint32_t * __a
, uint32_t __b
, const int __imm
)
13623 uint64_t __c
= ((uint64_t) __b
) << 32;
13624 uint8x16_t __res
= __builtin_mve_vdwdupq_n_uv16qi (*__a
, __c
, __imm
);
13625 *__a
= __builtin_mve_vdwdupq_wb_uv16qi (*__a
, __c
, __imm
);
13629 __extension__
extern __inline uint32x4_t
13630 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13631 __arm_vdwdupq_wb_u32 (uint32_t * __a
, uint32_t __b
, const int __imm
)
13633 uint64_t __c
= ((uint64_t) __b
) << 32;
13634 uint32x4_t __res
= __builtin_mve_vdwdupq_n_uv4si (*__a
, __c
, __imm
);
13635 *__a
= __builtin_mve_vdwdupq_wb_uv4si (*__a
, __c
, __imm
);
13639 __extension__
extern __inline uint16x8_t
13640 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13641 __arm_vdwdupq_wb_u16 (uint32_t * __a
, uint32_t __b
, const int __imm
)
13643 uint64_t __c
= ((uint64_t) __b
) << 32;
13644 uint16x8_t __res
= __builtin_mve_vdwdupq_n_uv8hi (*__a
, __c
, __imm
);
13645 *__a
= __builtin_mve_vdwdupq_wb_uv8hi (*__a
, __c
, __imm
);
13649 __extension__
extern __inline uint8x16_t
13650 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13651 __arm_vidupq_m_n_u8 (uint8x16_t __inactive
, uint32_t __a
, const int __imm
, mve_pred16_t __p
)
13653 return __builtin_mve_vidupq_m_n_uv16qi (__inactive
, __a
, __imm
, __p
);
13656 __extension__
extern __inline uint32x4_t
13657 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13658 __arm_vidupq_m_n_u32 (uint32x4_t __inactive
, uint32_t __a
, const int __imm
, mve_pred16_t __p
)
13660 return __builtin_mve_vidupq_m_n_uv4si (__inactive
, __a
, __imm
, __p
);
13663 __extension__
extern __inline uint16x8_t
13664 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13665 __arm_vidupq_m_n_u16 (uint16x8_t __inactive
, uint32_t __a
, const int __imm
, mve_pred16_t __p
)
13667 return __builtin_mve_vidupq_m_n_uv8hi (__inactive
, __a
, __imm
, __p
);
13670 __extension__
extern __inline uint8x16_t
13671 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13672 __arm_vidupq_n_u8 (uint32_t __a
, const int __imm
)
13674 return __builtin_mve_vidupq_n_uv16qi (__a
, __imm
);
13677 __extension__
extern __inline uint8x16_t
13678 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13679 __arm_vidupq_m_wb_u8 (uint8x16_t __inactive
, uint32_t * __a
, const int __imm
, mve_pred16_t __p
)
13681 uint8x16_t __res
= __builtin_mve_vidupq_m_n_uv16qi (__inactive
, *__a
, __imm
, __p
);
13682 *__a
+= __imm
* 16u;
13686 __extension__
extern __inline uint16x8_t
13687 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13688 __arm_vidupq_m_wb_u16 (uint16x8_t __inactive
, uint32_t * __a
, const int __imm
, mve_pred16_t __p
)
13690 uint16x8_t __res
= __builtin_mve_vidupq_m_n_uv8hi (__inactive
, *__a
, __imm
, __p
);
13691 *__a
+= __imm
* 8u;
13695 __extension__
extern __inline uint32x4_t
13696 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13697 __arm_vidupq_m_wb_u32 (uint32x4_t __inactive
, uint32_t * __a
, const int __imm
, mve_pred16_t __p
)
13699 uint32x4_t __res
= __builtin_mve_vidupq_m_n_uv4si (__inactive
, *__a
, __imm
, __p
);
13700 *__a
+= __imm
* 4u;
13704 __extension__
extern __inline uint32x4_t
13705 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13706 __arm_vidupq_n_u32 (uint32_t __a
, const int __imm
)
13708 return __builtin_mve_vidupq_n_uv4si (__a
, __imm
);
13711 __extension__
extern __inline uint16x8_t
13712 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13713 __arm_vidupq_n_u16 (uint32_t __a
, const int __imm
)
13715 return __builtin_mve_vidupq_n_uv8hi (__a
, __imm
);
13718 __extension__
extern __inline uint8x16_t
13719 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13720 __arm_vidupq_wb_u8 (uint32_t * __a
, const int __imm
)
13722 uint8x16_t __res
= __builtin_mve_vidupq_n_uv16qi (*__a
, __imm
);
13723 *__a
+= __imm
* 16u;
13727 __extension__
extern __inline uint16x8_t
13728 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13729 __arm_vidupq_wb_u16 (uint32_t * __a
, const int __imm
)
13731 uint16x8_t __res
= __builtin_mve_vidupq_n_uv8hi (*__a
, __imm
);
13732 *__a
+= __imm
* 8u;
13736 __extension__
extern __inline uint32x4_t
13737 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13738 __arm_vidupq_wb_u32 (uint32_t * __a
, const int __imm
)
13740 uint32x4_t __res
= __builtin_mve_vidupq_n_uv4si (*__a
, __imm
);
13741 *__a
+= __imm
* 4u;
13745 __extension__
extern __inline uint8x16_t
13746 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13747 __arm_vddupq_wb_u8 (uint32_t * __a
, const int __imm
)
13749 uint8x16_t __res
= __builtin_mve_vddupq_n_uv16qi (*__a
, __imm
);
13750 *__a
-= __imm
* 16u;
13754 __extension__
extern __inline uint16x8_t
13755 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13756 __arm_vddupq_wb_u16 (uint32_t * __a
, const int __imm
)
13758 uint16x8_t __res
= __builtin_mve_vddupq_n_uv8hi (*__a
, __imm
);
13759 *__a
-= __imm
* 8u;
13763 __extension__
extern __inline uint32x4_t
13764 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13765 __arm_vddupq_wb_u32 (uint32_t * __a
, const int __imm
)
13767 uint32x4_t __res
= __builtin_mve_vddupq_n_uv4si (*__a
, __imm
);
13768 *__a
-= __imm
* 4u;
13772 __extension__
extern __inline uint8x16_t
13773 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13774 __arm_viwdupq_m_n_u8 (uint8x16_t __inactive
, uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
13776 uint64_t __c
= ((uint64_t) __b
) << 32;
13777 return __builtin_mve_viwdupq_m_n_uv16qi (__inactive
, __a
, __c
, __imm
, __p
);
13780 __extension__
extern __inline uint32x4_t
13781 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13782 __arm_viwdupq_m_n_u32 (uint32x4_t __inactive
, uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
13784 uint64_t __c
= ((uint64_t) __b
) << 32;
13785 return __builtin_mve_viwdupq_m_n_uv4si (__inactive
, __a
, __c
, __imm
, __p
);
13788 __extension__
extern __inline uint16x8_t
13789 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13790 __arm_viwdupq_m_n_u16 (uint16x8_t __inactive
, uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
13792 uint64_t __c
= ((uint64_t) __b
) << 32;
13793 return __builtin_mve_viwdupq_m_n_uv8hi (__inactive
, __a
, __c
, __imm
, __p
);
13796 __extension__
extern __inline uint8x16_t
13797 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13798 __arm_viwdupq_m_wb_u8 (uint8x16_t __inactive
, uint32_t * __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
13800 uint64_t __c
= ((uint64_t) __b
) << 32;
13801 uint8x16_t __res
= __builtin_mve_viwdupq_m_n_uv16qi (__inactive
, *__a
, __c
, __imm
, __p
);
13802 *__a
= __builtin_mve_viwdupq_m_wb_uv16qi (__inactive
, *__a
, __c
, __imm
, __p
);
13806 __extension__
extern __inline uint32x4_t
13807 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13808 __arm_viwdupq_m_wb_u32 (uint32x4_t __inactive
, uint32_t * __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
13810 uint64_t __c
= ((uint64_t) __b
) << 32;
13811 uint32x4_t __res
= __builtin_mve_viwdupq_m_n_uv4si (__inactive
, *__a
, __c
, __imm
, __p
);
13812 *__a
= __builtin_mve_viwdupq_m_wb_uv4si (__inactive
, *__a
, __c
, __imm
, __p
);
13816 __extension__
extern __inline uint16x8_t
13817 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13818 __arm_viwdupq_m_wb_u16 (uint16x8_t __inactive
, uint32_t * __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
13820 uint64_t __c
= ((uint64_t) __b
) << 32;
13821 uint16x8_t __res
= __builtin_mve_viwdupq_m_n_uv8hi (__inactive
, *__a
, __c
, __imm
, __p
);
13822 *__a
= __builtin_mve_viwdupq_m_wb_uv8hi (__inactive
, *__a
, __c
, __imm
, __p
);
13826 __extension__
extern __inline uint8x16_t
13827 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13828 __arm_viwdupq_n_u8 (uint32_t __a
, uint32_t __b
, const int __imm
)
13830 uint64_t __c
= ((uint64_t) __b
) << 32;
13831 return __builtin_mve_viwdupq_n_uv16qi (__a
, __c
, __imm
);
13834 __extension__
extern __inline uint32x4_t
13835 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13836 __arm_viwdupq_n_u32 (uint32_t __a
, uint32_t __b
, const int __imm
)
13838 uint64_t __c
= ((uint64_t) __b
) << 32;
13839 return __builtin_mve_viwdupq_n_uv4si (__a
, __c
, __imm
);
13842 __extension__
extern __inline uint16x8_t
13843 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13844 __arm_viwdupq_n_u16 (uint32_t __a
, uint32_t __b
, const int __imm
)
13846 uint64_t __c
= ((uint64_t) __b
) << 32;
13847 return __builtin_mve_viwdupq_n_uv8hi (__a
, __c
, __imm
);
13850 __extension__
extern __inline uint8x16_t
13851 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13852 __arm_viwdupq_wb_u8 (uint32_t * __a
, uint32_t __b
, const int __imm
)
13854 uint64_t __c
= ((uint64_t) __b
) << 32;
13855 uint8x16_t __res
= __builtin_mve_viwdupq_n_uv16qi (*__a
, __c
, __imm
);
13856 *__a
= __builtin_mve_viwdupq_wb_uv16qi (*__a
, __c
, __imm
);
13860 __extension__
extern __inline uint32x4_t
13861 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13862 __arm_viwdupq_wb_u32 (uint32_t * __a
, uint32_t __b
, const int __imm
)
13864 uint64_t __c
= ((uint64_t) __b
) << 32;
13865 uint32x4_t __res
= __builtin_mve_viwdupq_n_uv4si (*__a
, __c
, __imm
);
13866 *__a
= __builtin_mve_viwdupq_wb_uv4si (*__a
, __c
, __imm
);
13870 __extension__
extern __inline uint16x8_t
13871 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13872 __arm_viwdupq_wb_u16 (uint32_t * __a
, uint32_t __b
, const int __imm
)
13874 uint64_t __c
= ((uint64_t) __b
) << 32;
13875 uint16x8_t __res
= __builtin_mve_viwdupq_n_uv8hi (*__a
, __c
, __imm
);
13876 *__a
= __builtin_mve_viwdupq_wb_uv8hi (*__a
, __c
, __imm
);
13881 __extension__
extern __inline int64x2_t
13882 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13883 __arm_vldrdq_gather_base_wb_s64 (uint64x2_t
* __addr
, const int __offset
)
13886 result
= __builtin_mve_vldrdq_gather_base_nowb_sv2di (*__addr
, __offset
);
13887 *__addr
= __builtin_mve_vldrdq_gather_base_wb_sv2di (*__addr
, __offset
);
13891 __extension__
extern __inline uint64x2_t
13892 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13893 __arm_vldrdq_gather_base_wb_u64 (uint64x2_t
* __addr
, const int __offset
)
13896 result
= __builtin_mve_vldrdq_gather_base_nowb_uv2di (*__addr
, __offset
);
13897 *__addr
= __builtin_mve_vldrdq_gather_base_wb_uv2di (*__addr
, __offset
);
13901 __extension__
extern __inline int64x2_t
13902 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13903 __arm_vldrdq_gather_base_wb_z_s64 (uint64x2_t
* __addr
, const int __offset
, mve_pred16_t __p
)
13906 result
= __builtin_mve_vldrdq_gather_base_nowb_z_sv2di (*__addr
, __offset
, __p
);
13907 *__addr
= __builtin_mve_vldrdq_gather_base_wb_z_sv2di (*__addr
, __offset
, __p
);
13911 __extension__
extern __inline uint64x2_t
13912 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13913 __arm_vldrdq_gather_base_wb_z_u64 (uint64x2_t
* __addr
, const int __offset
, mve_pred16_t __p
)
13916 result
= __builtin_mve_vldrdq_gather_base_nowb_z_uv2di (*__addr
, __offset
, __p
);
13917 *__addr
= __builtin_mve_vldrdq_gather_base_wb_z_uv2di (*__addr
, __offset
, __p
);
13921 __extension__
extern __inline int32x4_t
13922 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13923 __arm_vldrwq_gather_base_wb_s32 (uint32x4_t
* __addr
, const int __offset
)
13926 result
= __builtin_mve_vldrwq_gather_base_nowb_sv4si (*__addr
, __offset
);
13927 *__addr
= __builtin_mve_vldrwq_gather_base_wb_sv4si (*__addr
, __offset
);
13931 __extension__
extern __inline uint32x4_t
13932 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13933 __arm_vldrwq_gather_base_wb_u32 (uint32x4_t
* __addr
, const int __offset
)
13936 result
= __builtin_mve_vldrwq_gather_base_nowb_uv4si (*__addr
, __offset
);
13937 *__addr
= __builtin_mve_vldrwq_gather_base_wb_uv4si (*__addr
, __offset
);
13941 __extension__
extern __inline int32x4_t
13942 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13943 __arm_vldrwq_gather_base_wb_z_s32 (uint32x4_t
* __addr
, const int __offset
, mve_pred16_t __p
)
13946 result
= __builtin_mve_vldrwq_gather_base_nowb_z_sv4si (*__addr
, __offset
, __p
);
13947 *__addr
= __builtin_mve_vldrwq_gather_base_wb_z_sv4si (*__addr
, __offset
, __p
);
13951 __extension__
extern __inline uint32x4_t
13952 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13953 __arm_vldrwq_gather_base_wb_z_u32 (uint32x4_t
* __addr
, const int __offset
, mve_pred16_t __p
)
13956 result
= __builtin_mve_vldrwq_gather_base_nowb_z_uv4si (*__addr
, __offset
, __p
);
13957 *__addr
= __builtin_mve_vldrwq_gather_base_wb_z_uv4si (*__addr
, __offset
, __p
);
13961 __extension__
extern __inline
void
13962 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13963 __arm_vstrdq_scatter_base_wb_s64 (uint64x2_t
* __addr
, const int __offset
, int64x2_t __value
)
13965 *__addr
= __builtin_mve_vstrdq_scatter_base_wb_sv2di (*__addr
, __offset
, __value
);
13968 __extension__
extern __inline
void
13969 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13970 __arm_vstrdq_scatter_base_wb_u64 (uint64x2_t
* __addr
, const int __offset
, uint64x2_t __value
)
13972 *__addr
= __builtin_mve_vstrdq_scatter_base_wb_uv2di (*__addr
, __offset
, __value
);
13975 __extension__
extern __inline
void
13976 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13977 __arm_vstrdq_scatter_base_wb_p_s64 (uint64x2_t
* __addr
, const int __offset
, int64x2_t __value
, mve_pred16_t __p
)
13979 *__addr
= __builtin_mve_vstrdq_scatter_base_wb_p_sv2di (*__addr
, __offset
, __value
, __p
);
13982 __extension__
extern __inline
void
13983 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13984 __arm_vstrdq_scatter_base_wb_p_u64 (uint64x2_t
* __addr
, const int __offset
, uint64x2_t __value
, mve_pred16_t __p
)
13986 *__addr
= __builtin_mve_vstrdq_scatter_base_wb_p_uv2di (*__addr
, __offset
, __value
, __p
);
13989 __extension__
extern __inline
void
13990 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13991 __arm_vstrwq_scatter_base_wb_p_s32 (uint32x4_t
* __addr
, const int __offset
, int32x4_t __value
, mve_pred16_t __p
)
13993 *__addr
= __builtin_mve_vstrwq_scatter_base_wb_p_sv4si (*__addr
, __offset
, __value
, __p
);
13996 __extension__
extern __inline
void
13997 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
13998 __arm_vstrwq_scatter_base_wb_p_u32 (uint32x4_t
* __addr
, const int __offset
, uint32x4_t __value
, mve_pred16_t __p
)
14000 *__addr
= __builtin_mve_vstrwq_scatter_base_wb_p_uv4si (*__addr
, __offset
, __value
, __p
);
14003 __extension__
extern __inline
void
14004 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14005 __arm_vstrwq_scatter_base_wb_s32 (uint32x4_t
* __addr
, const int __offset
, int32x4_t __value
)
14007 *__addr
= __builtin_mve_vstrwq_scatter_base_wb_sv4si (*__addr
, __offset
, __value
);
14010 __extension__
extern __inline
void
14011 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14012 __arm_vstrwq_scatter_base_wb_u32 (uint32x4_t
* __addr
, const int __offset
, uint32x4_t __value
)
14014 *__addr
= __builtin_mve_vstrwq_scatter_base_wb_uv4si (*__addr
, __offset
, __value
);
14017 __extension__
extern __inline uint8x16_t
14018 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14019 __arm_vddupq_x_n_u8 (uint32_t __a
, const int __imm
, mve_pred16_t __p
)
14021 return __builtin_mve_vddupq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __imm
, __p
);
14024 __extension__
extern __inline uint16x8_t
14025 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14026 __arm_vddupq_x_n_u16 (uint32_t __a
, const int __imm
, mve_pred16_t __p
)
14028 return __builtin_mve_vddupq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __imm
, __p
);
14031 __extension__
extern __inline uint32x4_t
14032 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14033 __arm_vddupq_x_n_u32 (uint32_t __a
, const int __imm
, mve_pred16_t __p
)
14035 return __builtin_mve_vddupq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __imm
, __p
);
14038 __extension__
extern __inline uint8x16_t
14039 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14040 __arm_vddupq_x_wb_u8 (uint32_t *__a
, const int __imm
, mve_pred16_t __p
)
14042 uint8x16_t __arg1
= __arm_vuninitializedq_u8 ();
14043 uint8x16_t __res
= __builtin_mve_vddupq_m_n_uv16qi (__arg1
, * __a
, __imm
, __p
);
14044 *__a
-= __imm
* 16u;
14048 __extension__
extern __inline uint16x8_t
14049 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14050 __arm_vddupq_x_wb_u16 (uint32_t *__a
, const int __imm
, mve_pred16_t __p
)
14052 uint16x8_t __arg1
= __arm_vuninitializedq_u16 ();
14053 uint16x8_t __res
= __builtin_mve_vddupq_m_n_uv8hi (__arg1
, *__a
, __imm
, __p
);
14054 *__a
-= __imm
* 8u;
14058 __extension__
extern __inline uint32x4_t
14059 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14060 __arm_vddupq_x_wb_u32 (uint32_t *__a
, const int __imm
, mve_pred16_t __p
)
14062 uint32x4_t __arg1
= __arm_vuninitializedq_u32 ();
14063 uint32x4_t __res
= __builtin_mve_vddupq_m_n_uv4si (__arg1
, *__a
, __imm
, __p
);
14064 *__a
-= __imm
* 4u;
14068 __extension__
extern __inline uint8x16_t
14069 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14070 __arm_vdwdupq_x_n_u8 (uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
14072 uint64_t __c
= ((uint64_t) __b
) << 32;
14073 return __builtin_mve_vdwdupq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __c
, __imm
, __p
);
14076 __extension__
extern __inline uint16x8_t
14077 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14078 __arm_vdwdupq_x_n_u16 (uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
14080 uint64_t __c
= ((uint64_t) __b
) << 32;
14081 return __builtin_mve_vdwdupq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __c
, __imm
, __p
);
14084 __extension__
extern __inline uint32x4_t
14085 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14086 __arm_vdwdupq_x_n_u32 (uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
14088 uint64_t __c
= ((uint64_t) __b
) << 32;
14089 return __builtin_mve_vdwdupq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __c
, __imm
, __p
);
14092 __extension__
extern __inline uint8x16_t
14093 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14094 __arm_vdwdupq_x_wb_u8 (uint32_t *__a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
14096 uint64_t __c
= ((uint64_t) __b
) << 32;
14097 uint8x16_t __arg1
= __arm_vuninitializedq_u8 ();
14098 uint8x16_t __res
= __builtin_mve_vdwdupq_m_n_uv16qi (__arg1
, *__a
, __c
, __imm
, __p
);
14099 *__a
= __builtin_mve_vdwdupq_m_wb_uv16qi (__arg1
, *__a
, __c
, __imm
, __p
);
14103 __extension__
extern __inline uint16x8_t
14104 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14105 __arm_vdwdupq_x_wb_u16 (uint32_t *__a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
14107 uint64_t __c
= ((uint64_t) __b
) << 32;
14108 uint16x8_t __arg1
= __arm_vuninitializedq_u16 ();
14109 uint16x8_t __res
= __builtin_mve_vdwdupq_m_n_uv8hi (__arg1
, *__a
, __c
, __imm
, __p
);
14110 *__a
= __builtin_mve_vdwdupq_m_wb_uv8hi (__arg1
, *__a
, __c
, __imm
, __p
);
14114 __extension__
extern __inline uint32x4_t
14115 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14116 __arm_vdwdupq_x_wb_u32 (uint32_t *__a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
14118 uint64_t __c
= ((uint64_t) __b
) << 32;
14119 uint32x4_t __arg1
= __arm_vuninitializedq_u32 ();
14120 uint32x4_t __res
= __builtin_mve_vdwdupq_m_n_uv4si (__arg1
, *__a
, __c
, __imm
, __p
);
14121 *__a
= __builtin_mve_vdwdupq_m_wb_uv4si (__arg1
, *__a
, __c
, __imm
, __p
);
14125 __extension__
extern __inline uint8x16_t
14126 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14127 __arm_vidupq_x_n_u8 (uint32_t __a
, const int __imm
, mve_pred16_t __p
)
14129 return __builtin_mve_vidupq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __imm
, __p
);
14132 __extension__
extern __inline uint16x8_t
14133 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14134 __arm_vidupq_x_n_u16 (uint32_t __a
, const int __imm
, mve_pred16_t __p
)
14136 return __builtin_mve_vidupq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __imm
, __p
);
14139 __extension__
extern __inline uint32x4_t
14140 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14141 __arm_vidupq_x_n_u32 (uint32_t __a
, const int __imm
, mve_pred16_t __p
)
14143 return __builtin_mve_vidupq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __imm
, __p
);
14146 __extension__
extern __inline uint8x16_t
14147 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14148 __arm_vidupq_x_wb_u8 (uint32_t *__a
, const int __imm
, mve_pred16_t __p
)
14150 uint8x16_t __arg1
= __arm_vuninitializedq_u8 ();
14151 uint8x16_t __res
= __builtin_mve_vidupq_m_n_uv16qi (__arg1
, *__a
, __imm
, __p
);
14152 *__a
+= __imm
* 16u;
14156 __extension__
extern __inline uint16x8_t
14157 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14158 __arm_vidupq_x_wb_u16 (uint32_t *__a
, const int __imm
, mve_pred16_t __p
)
14160 uint16x8_t __arg1
= __arm_vuninitializedq_u16 ();
14161 uint16x8_t __res
= __builtin_mve_vidupq_m_n_uv8hi (__arg1
, *__a
, __imm
, __p
);
14162 *__a
+= __imm
* 8u;
14166 __extension__
extern __inline uint32x4_t
14167 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14168 __arm_vidupq_x_wb_u32 (uint32_t *__a
, const int __imm
, mve_pred16_t __p
)
14170 uint32x4_t __arg1
= __arm_vuninitializedq_u32 ();
14171 uint32x4_t __res
= __builtin_mve_vidupq_m_n_uv4si (__arg1
, *__a
, __imm
, __p
);
14172 *__a
+= __imm
* 4u;
14176 __extension__
extern __inline uint8x16_t
14177 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14178 __arm_viwdupq_x_n_u8 (uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
14180 uint64_t __c
= ((uint64_t) __b
) << 32;
14181 return __builtin_mve_viwdupq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __c
, __imm
, __p
);
14184 __extension__
extern __inline uint16x8_t
14185 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14186 __arm_viwdupq_x_n_u16 (uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
14188 uint64_t __c
= ((uint64_t) __b
) << 32;
14189 return __builtin_mve_viwdupq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __c
, __imm
, __p
);
14192 __extension__
extern __inline uint32x4_t
14193 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14194 __arm_viwdupq_x_n_u32 (uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
14196 uint64_t __c
= ((uint64_t) __b
) << 32;
14197 return __builtin_mve_viwdupq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __c
, __imm
, __p
);
14200 __extension__
extern __inline uint8x16_t
14201 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14202 __arm_viwdupq_x_wb_u8 (uint32_t *__a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
14204 uint64_t __c
= ((uint64_t) __b
) << 32;
14205 uint8x16_t __arg1
= __arm_vuninitializedq_u8 ();
14206 uint8x16_t __res
= __builtin_mve_viwdupq_m_n_uv16qi (__arg1
, *__a
, __c
, __imm
, __p
);
14207 *__a
= __builtin_mve_viwdupq_m_wb_uv16qi (__arg1
, *__a
, __c
, __imm
, __p
);
14211 __extension__
extern __inline uint16x8_t
14212 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14213 __arm_viwdupq_x_wb_u16 (uint32_t *__a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
14215 uint64_t __c
= ((uint64_t) __b
) << 32;
14216 uint16x8_t __arg1
= __arm_vuninitializedq_u16 ();
14217 uint16x8_t __res
= __builtin_mve_viwdupq_m_n_uv8hi (__arg1
, *__a
, __c
, __imm
, __p
);
14218 *__a
= __builtin_mve_viwdupq_m_wb_uv8hi (__arg1
, *__a
, __c
, __imm
, __p
);
14222 __extension__
extern __inline uint32x4_t
14223 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14224 __arm_viwdupq_x_wb_u32 (uint32_t *__a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
14226 uint64_t __c
= ((uint64_t) __b
) << 32;
14227 uint32x4_t __arg1
= __arm_vuninitializedq_u32 ();
14228 uint32x4_t __res
= __builtin_mve_viwdupq_m_n_uv4si (__arg1
, *__a
, __c
, __imm
, __p
);
14229 *__a
= __builtin_mve_viwdupq_m_wb_uv4si (__arg1
, *__a
, __c
, __imm
, __p
);
14233 __extension__
extern __inline int8x16_t
14234 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14235 __arm_vdupq_x_n_s8 (int8_t __a
, mve_pred16_t __p
)
14237 return __builtin_mve_vdupq_m_n_sv16qi (__arm_vuninitializedq_s8 (), __a
, __p
);
14240 __extension__
extern __inline int16x8_t
14241 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14242 __arm_vdupq_x_n_s16 (int16_t __a
, mve_pred16_t __p
)
14244 return __builtin_mve_vdupq_m_n_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
14247 __extension__
extern __inline int32x4_t
14248 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14249 __arm_vdupq_x_n_s32 (int32_t __a
, mve_pred16_t __p
)
14251 return __builtin_mve_vdupq_m_n_sv4si (__arm_vuninitializedq_s32 (), __a
, __p
);
14254 __extension__
extern __inline uint8x16_t
14255 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14256 __arm_vdupq_x_n_u8 (uint8_t __a
, mve_pred16_t __p
)
14258 return __builtin_mve_vdupq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __p
);
14261 __extension__
extern __inline uint16x8_t
14262 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14263 __arm_vdupq_x_n_u16 (uint16_t __a
, mve_pred16_t __p
)
14265 return __builtin_mve_vdupq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __p
);
14268 __extension__
extern __inline uint32x4_t
14269 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14270 __arm_vdupq_x_n_u32 (uint32_t __a
, mve_pred16_t __p
)
14272 return __builtin_mve_vdupq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __p
);
14275 __extension__
extern __inline int8x16_t
14276 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14277 __arm_vminq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
14279 return __builtin_mve_vminq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
14282 __extension__
extern __inline int16x8_t
14283 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14284 __arm_vminq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
14286 return __builtin_mve_vminq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14289 __extension__
extern __inline int32x4_t
14290 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14291 __arm_vminq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
14293 return __builtin_mve_vminq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14296 __extension__
extern __inline uint8x16_t
14297 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14298 __arm_vminq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14300 return __builtin_mve_vminq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
14303 __extension__
extern __inline uint16x8_t
14304 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14305 __arm_vminq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14307 return __builtin_mve_vminq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14310 __extension__
extern __inline uint32x4_t
14311 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14312 __arm_vminq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
14314 return __builtin_mve_vminq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14317 __extension__
extern __inline int8x16_t
14318 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14319 __arm_vmaxq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
14321 return __builtin_mve_vmaxq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
14324 __extension__
extern __inline int16x8_t
14325 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14326 __arm_vmaxq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
14328 return __builtin_mve_vmaxq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14331 __extension__
extern __inline int32x4_t
14332 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14333 __arm_vmaxq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
14335 return __builtin_mve_vmaxq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14338 __extension__
extern __inline uint8x16_t
14339 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14340 __arm_vmaxq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14342 return __builtin_mve_vmaxq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
14345 __extension__
extern __inline uint16x8_t
14346 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14347 __arm_vmaxq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14349 return __builtin_mve_vmaxq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14352 __extension__
extern __inline uint32x4_t
14353 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14354 __arm_vmaxq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
14356 return __builtin_mve_vmaxq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14359 __extension__
extern __inline int8x16_t
14360 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14361 __arm_vabdq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
14363 return __builtin_mve_vabdq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
14366 __extension__
extern __inline int16x8_t
14367 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14368 __arm_vabdq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
14370 return __builtin_mve_vabdq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14373 __extension__
extern __inline int32x4_t
14374 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14375 __arm_vabdq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
14377 return __builtin_mve_vabdq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14380 __extension__
extern __inline uint8x16_t
14381 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14382 __arm_vabdq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14384 return __builtin_mve_vabdq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
14387 __extension__
extern __inline uint16x8_t
14388 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14389 __arm_vabdq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14391 return __builtin_mve_vabdq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14394 __extension__
extern __inline uint32x4_t
14395 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14396 __arm_vabdq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
14398 return __builtin_mve_vabdq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14401 __extension__
extern __inline int8x16_t
14402 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14403 __arm_vabsq_x_s8 (int8x16_t __a
, mve_pred16_t __p
)
14405 return __builtin_mve_vabsq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __p
);
14408 __extension__
extern __inline int16x8_t
14409 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14410 __arm_vabsq_x_s16 (int16x8_t __a
, mve_pred16_t __p
)
14412 return __builtin_mve_vabsq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
14415 __extension__
extern __inline int32x4_t
14416 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14417 __arm_vabsq_x_s32 (int32x4_t __a
, mve_pred16_t __p
)
14419 return __builtin_mve_vabsq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __p
);
14422 __extension__
extern __inline int8x16_t
14423 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14424 __arm_vaddq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
14426 return __builtin_mve_vaddq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
14429 __extension__
extern __inline int16x8_t
14430 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14431 __arm_vaddq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
14433 return __builtin_mve_vaddq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14436 __extension__
extern __inline int32x4_t
14437 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14438 __arm_vaddq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
14440 return __builtin_mve_vaddq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14443 __extension__
extern __inline int8x16_t
14444 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14445 __arm_vaddq_x_n_s8 (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
14447 return __builtin_mve_vaddq_m_n_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
14450 __extension__
extern __inline int16x8_t
14451 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14452 __arm_vaddq_x_n_s16 (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
14454 return __builtin_mve_vaddq_m_n_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14457 __extension__
extern __inline int32x4_t
14458 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14459 __arm_vaddq_x_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
14461 return __builtin_mve_vaddq_m_n_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14464 __extension__
extern __inline uint8x16_t
14465 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14466 __arm_vaddq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14468 return __builtin_mve_vaddq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
14471 __extension__
extern __inline uint16x8_t
14472 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14473 __arm_vaddq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14475 return __builtin_mve_vaddq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14478 __extension__
extern __inline uint32x4_t
14479 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14480 __arm_vaddq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
14482 return __builtin_mve_vaddq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14485 __extension__
extern __inline uint8x16_t
14486 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14487 __arm_vaddq_x_n_u8 (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
14489 return __builtin_mve_vaddq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
14492 __extension__
extern __inline uint16x8_t
14493 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14494 __arm_vaddq_x_n_u16 (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
14496 return __builtin_mve_vaddq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14499 __extension__
extern __inline uint32x4_t
14500 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14501 __arm_vaddq_x_n_u32 (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
14503 return __builtin_mve_vaddq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14506 __extension__
extern __inline int8x16_t
14507 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14508 __arm_vclsq_x_s8 (int8x16_t __a
, mve_pred16_t __p
)
14510 return __builtin_mve_vclsq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __p
);
14513 __extension__
extern __inline int16x8_t
14514 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14515 __arm_vclsq_x_s16 (int16x8_t __a
, mve_pred16_t __p
)
14517 return __builtin_mve_vclsq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
14520 __extension__
extern __inline int32x4_t
14521 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14522 __arm_vclsq_x_s32 (int32x4_t __a
, mve_pred16_t __p
)
14524 return __builtin_mve_vclsq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __p
);
14527 __extension__
extern __inline int8x16_t
14528 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14529 __arm_vclzq_x_s8 (int8x16_t __a
, mve_pred16_t __p
)
14531 return __builtin_mve_vclzq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __p
);
14534 __extension__
extern __inline int16x8_t
14535 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14536 __arm_vclzq_x_s16 (int16x8_t __a
, mve_pred16_t __p
)
14538 return __builtin_mve_vclzq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
14541 __extension__
extern __inline int32x4_t
14542 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14543 __arm_vclzq_x_s32 (int32x4_t __a
, mve_pred16_t __p
)
14545 return __builtin_mve_vclzq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __p
);
14548 __extension__
extern __inline uint8x16_t
14549 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14550 __arm_vclzq_x_u8 (uint8x16_t __a
, mve_pred16_t __p
)
14552 return __builtin_mve_vclzq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __p
);
14555 __extension__
extern __inline uint16x8_t
14556 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14557 __arm_vclzq_x_u16 (uint16x8_t __a
, mve_pred16_t __p
)
14559 return __builtin_mve_vclzq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __p
);
14562 __extension__
extern __inline uint32x4_t
14563 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14564 __arm_vclzq_x_u32 (uint32x4_t __a
, mve_pred16_t __p
)
14566 return __builtin_mve_vclzq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __p
);
14569 __extension__
extern __inline int8x16_t
14570 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14571 __arm_vnegq_x_s8 (int8x16_t __a
, mve_pred16_t __p
)
14573 return __builtin_mve_vnegq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __p
);
14576 __extension__
extern __inline int16x8_t
14577 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14578 __arm_vnegq_x_s16 (int16x8_t __a
, mve_pred16_t __p
)
14580 return __builtin_mve_vnegq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
14583 __extension__
extern __inline int32x4_t
14584 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14585 __arm_vnegq_x_s32 (int32x4_t __a
, mve_pred16_t __p
)
14587 return __builtin_mve_vnegq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __p
);
14590 __extension__
extern __inline int8x16_t
14591 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14592 __arm_vmulhq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
14594 return __builtin_mve_vmulhq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
14597 __extension__
extern __inline int16x8_t
14598 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14599 __arm_vmulhq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
14601 return __builtin_mve_vmulhq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14604 __extension__
extern __inline int32x4_t
14605 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14606 __arm_vmulhq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
14608 return __builtin_mve_vmulhq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14611 __extension__
extern __inline uint8x16_t
14612 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14613 __arm_vmulhq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14615 return __builtin_mve_vmulhq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
14618 __extension__
extern __inline uint16x8_t
14619 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14620 __arm_vmulhq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14622 return __builtin_mve_vmulhq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14625 __extension__
extern __inline uint32x4_t
14626 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14627 __arm_vmulhq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
14629 return __builtin_mve_vmulhq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14632 __extension__
extern __inline uint16x8_t
14633 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14634 __arm_vmullbq_poly_x_p8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14636 return __builtin_mve_vmullbq_poly_m_pv16qi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14639 __extension__
extern __inline uint32x4_t
14640 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14641 __arm_vmullbq_poly_x_p16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14643 return __builtin_mve_vmullbq_poly_m_pv8hi (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14646 __extension__
extern __inline int16x8_t
14647 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14648 __arm_vmullbq_int_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
14650 return __builtin_mve_vmullbq_int_m_sv16qi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14653 __extension__
extern __inline int32x4_t
14654 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14655 __arm_vmullbq_int_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
14657 return __builtin_mve_vmullbq_int_m_sv8hi (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14660 __extension__
extern __inline int64x2_t
14661 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14662 __arm_vmullbq_int_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
14664 return __builtin_mve_vmullbq_int_m_sv4si (__arm_vuninitializedq_s64 (), __a
, __b
, __p
);
14667 __extension__
extern __inline uint16x8_t
14668 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14669 __arm_vmullbq_int_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14671 return __builtin_mve_vmullbq_int_m_uv16qi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14674 __extension__
extern __inline uint32x4_t
14675 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14676 __arm_vmullbq_int_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14678 return __builtin_mve_vmullbq_int_m_uv8hi (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14681 __extension__
extern __inline uint64x2_t
14682 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14683 __arm_vmullbq_int_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
14685 return __builtin_mve_vmullbq_int_m_uv4si (__arm_vuninitializedq_u64 (), __a
, __b
, __p
);
14688 __extension__
extern __inline uint16x8_t
14689 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14690 __arm_vmulltq_poly_x_p8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14692 return __builtin_mve_vmulltq_poly_m_pv16qi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14695 __extension__
extern __inline uint32x4_t
14696 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14697 __arm_vmulltq_poly_x_p16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14699 return __builtin_mve_vmulltq_poly_m_pv8hi (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14702 __extension__
extern __inline int16x8_t
14703 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14704 __arm_vmulltq_int_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
14706 return __builtin_mve_vmulltq_int_m_sv16qi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14709 __extension__
extern __inline int32x4_t
14710 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14711 __arm_vmulltq_int_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
14713 return __builtin_mve_vmulltq_int_m_sv8hi (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14716 __extension__
extern __inline int64x2_t
14717 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14718 __arm_vmulltq_int_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
14720 return __builtin_mve_vmulltq_int_m_sv4si (__arm_vuninitializedq_s64 (), __a
, __b
, __p
);
14723 __extension__
extern __inline uint16x8_t
14724 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14725 __arm_vmulltq_int_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14727 return __builtin_mve_vmulltq_int_m_uv16qi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14730 __extension__
extern __inline uint32x4_t
14731 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14732 __arm_vmulltq_int_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14734 return __builtin_mve_vmulltq_int_m_uv8hi (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14737 __extension__
extern __inline uint64x2_t
14738 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14739 __arm_vmulltq_int_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
14741 return __builtin_mve_vmulltq_int_m_uv4si (__arm_vuninitializedq_u64 (), __a
, __b
, __p
);
14744 __extension__
extern __inline int8x16_t
14745 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14746 __arm_vmulq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
14748 return __builtin_mve_vmulq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
14751 __extension__
extern __inline int16x8_t
14752 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14753 __arm_vmulq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
14755 return __builtin_mve_vmulq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14758 __extension__
extern __inline int32x4_t
14759 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14760 __arm_vmulq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
14762 return __builtin_mve_vmulq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14765 __extension__
extern __inline int8x16_t
14766 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14767 __arm_vmulq_x_n_s8 (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
14769 return __builtin_mve_vmulq_m_n_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
14772 __extension__
extern __inline int16x8_t
14773 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14774 __arm_vmulq_x_n_s16 (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
14776 return __builtin_mve_vmulq_m_n_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14779 __extension__
extern __inline int32x4_t
14780 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14781 __arm_vmulq_x_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
14783 return __builtin_mve_vmulq_m_n_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14786 __extension__
extern __inline uint8x16_t
14787 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14788 __arm_vmulq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14790 return __builtin_mve_vmulq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
14793 __extension__
extern __inline uint16x8_t
14794 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14795 __arm_vmulq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14797 return __builtin_mve_vmulq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14800 __extension__
extern __inline uint32x4_t
14801 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14802 __arm_vmulq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
14804 return __builtin_mve_vmulq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14807 __extension__
extern __inline uint8x16_t
14808 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14809 __arm_vmulq_x_n_u8 (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
14811 return __builtin_mve_vmulq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
14814 __extension__
extern __inline uint16x8_t
14815 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14816 __arm_vmulq_x_n_u16 (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
14818 return __builtin_mve_vmulq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14821 __extension__
extern __inline uint32x4_t
14822 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14823 __arm_vmulq_x_n_u32 (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
14825 return __builtin_mve_vmulq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14828 __extension__
extern __inline int8x16_t
14829 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14830 __arm_vsubq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
14832 return __builtin_mve_vsubq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
14835 __extension__
extern __inline int16x8_t
14836 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14837 __arm_vsubq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
14839 return __builtin_mve_vsubq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14842 __extension__
extern __inline int32x4_t
14843 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14844 __arm_vsubq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
14846 return __builtin_mve_vsubq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14849 __extension__
extern __inline int8x16_t
14850 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14851 __arm_vsubq_x_n_s8 (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
14853 return __builtin_mve_vsubq_m_n_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
14856 __extension__
extern __inline int16x8_t
14857 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14858 __arm_vsubq_x_n_s16 (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
14860 return __builtin_mve_vsubq_m_n_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14863 __extension__
extern __inline int32x4_t
14864 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14865 __arm_vsubq_x_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
14867 return __builtin_mve_vsubq_m_n_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14870 __extension__
extern __inline uint8x16_t
14871 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14872 __arm_vsubq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14874 return __builtin_mve_vsubq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
14877 __extension__
extern __inline uint16x8_t
14878 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14879 __arm_vsubq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14881 return __builtin_mve_vsubq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14884 __extension__
extern __inline uint32x4_t
14885 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14886 __arm_vsubq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
14888 return __builtin_mve_vsubq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14891 __extension__
extern __inline uint8x16_t
14892 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14893 __arm_vsubq_x_n_u8 (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
14895 return __builtin_mve_vsubq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
14898 __extension__
extern __inline uint16x8_t
14899 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14900 __arm_vsubq_x_n_u16 (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
14902 return __builtin_mve_vsubq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14905 __extension__
extern __inline uint32x4_t
14906 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14907 __arm_vsubq_x_n_u32 (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
14909 return __builtin_mve_vsubq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14912 __extension__
extern __inline int8x16_t
14913 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14914 __arm_vcaddq_rot90_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
14916 return __builtin_mve_vcaddq_rot90_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
14919 __extension__
extern __inline int16x8_t
14920 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14921 __arm_vcaddq_rot90_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
14923 return __builtin_mve_vcaddq_rot90_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14926 __extension__
extern __inline int32x4_t
14927 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14928 __arm_vcaddq_rot90_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
14930 return __builtin_mve_vcaddq_rot90_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14933 __extension__
extern __inline uint8x16_t
14934 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14935 __arm_vcaddq_rot90_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14937 return __builtin_mve_vcaddq_rot90_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
14940 __extension__
extern __inline uint16x8_t
14941 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14942 __arm_vcaddq_rot90_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14944 return __builtin_mve_vcaddq_rot90_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14947 __extension__
extern __inline uint32x4_t
14948 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14949 __arm_vcaddq_rot90_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
14951 return __builtin_mve_vcaddq_rot90_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14954 __extension__
extern __inline int8x16_t
14955 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14956 __arm_vcaddq_rot270_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
14958 return __builtin_mve_vcaddq_rot270_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
14961 __extension__
extern __inline int16x8_t
14962 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14963 __arm_vcaddq_rot270_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
14965 return __builtin_mve_vcaddq_rot270_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
14968 __extension__
extern __inline int32x4_t
14969 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14970 __arm_vcaddq_rot270_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
14972 return __builtin_mve_vcaddq_rot270_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
14975 __extension__
extern __inline uint8x16_t
14976 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14977 __arm_vcaddq_rot270_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
14979 return __builtin_mve_vcaddq_rot270_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
14982 __extension__
extern __inline uint16x8_t
14983 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14984 __arm_vcaddq_rot270_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
14986 return __builtin_mve_vcaddq_rot270_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
14989 __extension__
extern __inline uint32x4_t
14990 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14991 __arm_vcaddq_rot270_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
14993 return __builtin_mve_vcaddq_rot270_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
14996 __extension__
extern __inline int8x16_t
14997 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
14998 __arm_vhaddq_x_n_s8 (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
15000 return __builtin_mve_vhaddq_m_n_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15003 __extension__
extern __inline int16x8_t
15004 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15005 __arm_vhaddq_x_n_s16 (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
15007 return __builtin_mve_vhaddq_m_n_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15010 __extension__
extern __inline int32x4_t
15011 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15012 __arm_vhaddq_x_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
15014 return __builtin_mve_vhaddq_m_n_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15017 __extension__
extern __inline uint8x16_t
15018 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15019 __arm_vhaddq_x_n_u8 (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
15021 return __builtin_mve_vhaddq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15024 __extension__
extern __inline uint16x8_t
15025 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15026 __arm_vhaddq_x_n_u16 (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
15028 return __builtin_mve_vhaddq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15031 __extension__
extern __inline uint32x4_t
15032 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15033 __arm_vhaddq_x_n_u32 (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
15035 return __builtin_mve_vhaddq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15038 __extension__
extern __inline int8x16_t
15039 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15040 __arm_vhaddq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15042 return __builtin_mve_vhaddq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15045 __extension__
extern __inline int16x8_t
15046 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15047 __arm_vhaddq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15049 return __builtin_mve_vhaddq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15052 __extension__
extern __inline int32x4_t
15053 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15054 __arm_vhaddq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15056 return __builtin_mve_vhaddq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15059 __extension__
extern __inline uint8x16_t
15060 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15061 __arm_vhaddq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
15063 return __builtin_mve_vhaddq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15066 __extension__
extern __inline uint16x8_t
15067 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15068 __arm_vhaddq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
15070 return __builtin_mve_vhaddq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15073 __extension__
extern __inline uint32x4_t
15074 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15075 __arm_vhaddq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
15077 return __builtin_mve_vhaddq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15080 __extension__
extern __inline int8x16_t
15081 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15082 __arm_vhcaddq_rot90_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15084 return __builtin_mve_vhcaddq_rot90_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15087 __extension__
extern __inline int16x8_t
15088 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15089 __arm_vhcaddq_rot90_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15091 return __builtin_mve_vhcaddq_rot90_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15094 __extension__
extern __inline int32x4_t
15095 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15096 __arm_vhcaddq_rot90_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15098 return __builtin_mve_vhcaddq_rot90_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15101 __extension__
extern __inline int8x16_t
15102 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15103 __arm_vhcaddq_rot270_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15105 return __builtin_mve_vhcaddq_rot270_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15108 __extension__
extern __inline int16x8_t
15109 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15110 __arm_vhcaddq_rot270_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15112 return __builtin_mve_vhcaddq_rot270_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15115 __extension__
extern __inline int32x4_t
15116 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15117 __arm_vhcaddq_rot270_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15119 return __builtin_mve_vhcaddq_rot270_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15122 __extension__
extern __inline int8x16_t
15123 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15124 __arm_vhsubq_x_n_s8 (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
15126 return __builtin_mve_vhsubq_m_n_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15129 __extension__
extern __inline int16x8_t
15130 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15131 __arm_vhsubq_x_n_s16 (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
15133 return __builtin_mve_vhsubq_m_n_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15136 __extension__
extern __inline int32x4_t
15137 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15138 __arm_vhsubq_x_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
15140 return __builtin_mve_vhsubq_m_n_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15143 __extension__
extern __inline uint8x16_t
15144 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15145 __arm_vhsubq_x_n_u8 (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
15147 return __builtin_mve_vhsubq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15150 __extension__
extern __inline uint16x8_t
15151 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15152 __arm_vhsubq_x_n_u16 (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
15154 return __builtin_mve_vhsubq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15157 __extension__
extern __inline uint32x4_t
15158 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15159 __arm_vhsubq_x_n_u32 (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
15161 return __builtin_mve_vhsubq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15164 __extension__
extern __inline int8x16_t
15165 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15166 __arm_vhsubq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15168 return __builtin_mve_vhsubq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15171 __extension__
extern __inline int16x8_t
15172 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15173 __arm_vhsubq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15175 return __builtin_mve_vhsubq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15178 __extension__
extern __inline int32x4_t
15179 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15180 __arm_vhsubq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15182 return __builtin_mve_vhsubq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15185 __extension__
extern __inline uint8x16_t
15186 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15187 __arm_vhsubq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
15189 return __builtin_mve_vhsubq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15192 __extension__
extern __inline uint16x8_t
15193 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15194 __arm_vhsubq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
15196 return __builtin_mve_vhsubq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15199 __extension__
extern __inline uint32x4_t
15200 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15201 __arm_vhsubq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
15203 return __builtin_mve_vhsubq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15206 __extension__
extern __inline int8x16_t
15207 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15208 __arm_vrhaddq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15210 return __builtin_mve_vrhaddq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15213 __extension__
extern __inline int16x8_t
15214 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15215 __arm_vrhaddq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15217 return __builtin_mve_vrhaddq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15220 __extension__
extern __inline int32x4_t
15221 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15222 __arm_vrhaddq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15224 return __builtin_mve_vrhaddq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15227 __extension__
extern __inline uint8x16_t
15228 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15229 __arm_vrhaddq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
15231 return __builtin_mve_vrhaddq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15234 __extension__
extern __inline uint16x8_t
15235 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15236 __arm_vrhaddq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
15238 return __builtin_mve_vrhaddq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15241 __extension__
extern __inline uint32x4_t
15242 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15243 __arm_vrhaddq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
15245 return __builtin_mve_vrhaddq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15248 __extension__
extern __inline int8x16_t
15249 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15250 __arm_vrmulhq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15252 return __builtin_mve_vrmulhq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15255 __extension__
extern __inline int16x8_t
15256 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15257 __arm_vrmulhq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15259 return __builtin_mve_vrmulhq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15262 __extension__
extern __inline int32x4_t
15263 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15264 __arm_vrmulhq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15266 return __builtin_mve_vrmulhq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15269 __extension__
extern __inline uint8x16_t
15270 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15271 __arm_vrmulhq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
15273 return __builtin_mve_vrmulhq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15276 __extension__
extern __inline uint16x8_t
15277 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15278 __arm_vrmulhq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
15280 return __builtin_mve_vrmulhq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15283 __extension__
extern __inline uint32x4_t
15284 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15285 __arm_vrmulhq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
15287 return __builtin_mve_vrmulhq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15290 __extension__
extern __inline int8x16_t
15291 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15292 __arm_vandq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15294 return __builtin_mve_vandq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15297 __extension__
extern __inline int16x8_t
15298 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15299 __arm_vandq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15301 return __builtin_mve_vandq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15304 __extension__
extern __inline int32x4_t
15305 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15306 __arm_vandq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15308 return __builtin_mve_vandq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15311 __extension__
extern __inline uint8x16_t
15312 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15313 __arm_vandq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
15315 return __builtin_mve_vandq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15318 __extension__
extern __inline uint16x8_t
15319 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15320 __arm_vandq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
15322 return __builtin_mve_vandq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15325 __extension__
extern __inline uint32x4_t
15326 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15327 __arm_vandq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
15329 return __builtin_mve_vandq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15332 __extension__
extern __inline int8x16_t
15333 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15334 __arm_vbicq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15336 return __builtin_mve_vbicq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15339 __extension__
extern __inline int16x8_t
15340 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15341 __arm_vbicq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15343 return __builtin_mve_vbicq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15346 __extension__
extern __inline int32x4_t
15347 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15348 __arm_vbicq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15350 return __builtin_mve_vbicq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15353 __extension__
extern __inline uint8x16_t
15354 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15355 __arm_vbicq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
15357 return __builtin_mve_vbicq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15360 __extension__
extern __inline uint16x8_t
15361 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15362 __arm_vbicq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
15364 return __builtin_mve_vbicq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15367 __extension__
extern __inline uint32x4_t
15368 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15369 __arm_vbicq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
15371 return __builtin_mve_vbicq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15374 __extension__
extern __inline int8x16_t
15375 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15376 __arm_vbrsrq_x_n_s8 (int8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
15378 return __builtin_mve_vbrsrq_m_n_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15381 __extension__
extern __inline int16x8_t
15382 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15383 __arm_vbrsrq_x_n_s16 (int16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
15385 return __builtin_mve_vbrsrq_m_n_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15388 __extension__
extern __inline int32x4_t
15389 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15390 __arm_vbrsrq_x_n_s32 (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
15392 return __builtin_mve_vbrsrq_m_n_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15395 __extension__
extern __inline uint8x16_t
15396 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15397 __arm_vbrsrq_x_n_u8 (uint8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
15399 return __builtin_mve_vbrsrq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15402 __extension__
extern __inline uint16x8_t
15403 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15404 __arm_vbrsrq_x_n_u16 (uint16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
15406 return __builtin_mve_vbrsrq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15409 __extension__
extern __inline uint32x4_t
15410 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15411 __arm_vbrsrq_x_n_u32 (uint32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
15413 return __builtin_mve_vbrsrq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15416 __extension__
extern __inline int8x16_t
15417 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15418 __arm_veorq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15420 return __builtin_mve_veorq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15423 __extension__
extern __inline int16x8_t
15424 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15425 __arm_veorq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15427 return __builtin_mve_veorq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15430 __extension__
extern __inline int32x4_t
15431 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15432 __arm_veorq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15434 return __builtin_mve_veorq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15437 __extension__
extern __inline uint8x16_t
15438 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15439 __arm_veorq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
15441 return __builtin_mve_veorq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15444 __extension__
extern __inline uint16x8_t
15445 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15446 __arm_veorq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
15448 return __builtin_mve_veorq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15451 __extension__
extern __inline uint32x4_t
15452 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15453 __arm_veorq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
15455 return __builtin_mve_veorq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15458 __extension__
extern __inline int16x8_t
15459 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15460 __arm_vmovlbq_x_s8 (int8x16_t __a
, mve_pred16_t __p
)
15462 return __builtin_mve_vmovlbq_m_sv16qi (__arm_vuninitializedq_s16 (), __a
, __p
);
15465 __extension__
extern __inline int32x4_t
15466 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15467 __arm_vmovlbq_x_s16 (int16x8_t __a
, mve_pred16_t __p
)
15469 return __builtin_mve_vmovlbq_m_sv8hi (__arm_vuninitializedq_s32 (), __a
, __p
);
15472 __extension__
extern __inline uint16x8_t
15473 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15474 __arm_vmovlbq_x_u8 (uint8x16_t __a
, mve_pred16_t __p
)
15476 return __builtin_mve_vmovlbq_m_uv16qi (__arm_vuninitializedq_u16 (), __a
, __p
);
15479 __extension__
extern __inline uint32x4_t
15480 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15481 __arm_vmovlbq_x_u16 (uint16x8_t __a
, mve_pred16_t __p
)
15483 return __builtin_mve_vmovlbq_m_uv8hi (__arm_vuninitializedq_u32 (), __a
, __p
);
15486 __extension__
extern __inline int16x8_t
15487 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15488 __arm_vmovltq_x_s8 (int8x16_t __a
, mve_pred16_t __p
)
15490 return __builtin_mve_vmovltq_m_sv16qi (__arm_vuninitializedq_s16 (), __a
, __p
);
15493 __extension__
extern __inline int32x4_t
15494 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15495 __arm_vmovltq_x_s16 (int16x8_t __a
, mve_pred16_t __p
)
15497 return __builtin_mve_vmovltq_m_sv8hi (__arm_vuninitializedq_s32 (), __a
, __p
);
15500 __extension__
extern __inline uint16x8_t
15501 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15502 __arm_vmovltq_x_u8 (uint8x16_t __a
, mve_pred16_t __p
)
15504 return __builtin_mve_vmovltq_m_uv16qi (__arm_vuninitializedq_u16 (), __a
, __p
);
15507 __extension__
extern __inline uint32x4_t
15508 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15509 __arm_vmovltq_x_u16 (uint16x8_t __a
, mve_pred16_t __p
)
15511 return __builtin_mve_vmovltq_m_uv8hi (__arm_vuninitializedq_u32 (), __a
, __p
);
15514 __extension__
extern __inline int8x16_t
15515 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15516 __arm_vmvnq_x_s8 (int8x16_t __a
, mve_pred16_t __p
)
15518 return __builtin_mve_vmvnq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __p
);
15521 __extension__
extern __inline int16x8_t
15522 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15523 __arm_vmvnq_x_s16 (int16x8_t __a
, mve_pred16_t __p
)
15525 return __builtin_mve_vmvnq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
15528 __extension__
extern __inline int32x4_t
15529 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15530 __arm_vmvnq_x_s32 (int32x4_t __a
, mve_pred16_t __p
)
15532 return __builtin_mve_vmvnq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __p
);
15535 __extension__
extern __inline uint8x16_t
15536 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15537 __arm_vmvnq_x_u8 (uint8x16_t __a
, mve_pred16_t __p
)
15539 return __builtin_mve_vmvnq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __p
);
15542 __extension__
extern __inline uint16x8_t
15543 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15544 __arm_vmvnq_x_u16 (uint16x8_t __a
, mve_pred16_t __p
)
15546 return __builtin_mve_vmvnq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __p
);
15549 __extension__
extern __inline uint32x4_t
15550 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15551 __arm_vmvnq_x_u32 (uint32x4_t __a
, mve_pred16_t __p
)
15553 return __builtin_mve_vmvnq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __p
);
15556 __extension__
extern __inline int16x8_t
15557 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15558 __arm_vmvnq_x_n_s16 (const int __imm
, mve_pred16_t __p
)
15560 return __builtin_mve_vmvnq_m_n_sv8hi (__arm_vuninitializedq_s16 (), __imm
, __p
);
15563 __extension__
extern __inline int32x4_t
15564 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15565 __arm_vmvnq_x_n_s32 (const int __imm
, mve_pred16_t __p
)
15567 return __builtin_mve_vmvnq_m_n_sv4si (__arm_vuninitializedq_s32 (), __imm
, __p
);
15570 __extension__
extern __inline uint16x8_t
15571 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15572 __arm_vmvnq_x_n_u16 (const int __imm
, mve_pred16_t __p
)
15574 return __builtin_mve_vmvnq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __imm
, __p
);
15577 __extension__
extern __inline uint32x4_t
15578 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15579 __arm_vmvnq_x_n_u32 (const int __imm
, mve_pred16_t __p
)
15581 return __builtin_mve_vmvnq_m_n_uv4si (__arm_vuninitializedq_u32 (), __imm
, __p
);
15584 __extension__
extern __inline int8x16_t
15585 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15586 __arm_vornq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15588 return __builtin_mve_vornq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15591 __extension__
extern __inline int16x8_t
15592 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15593 __arm_vornq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15595 return __builtin_mve_vornq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15598 __extension__
extern __inline int32x4_t
15599 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15600 __arm_vornq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15602 return __builtin_mve_vornq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15605 __extension__
extern __inline uint8x16_t
15606 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15607 __arm_vornq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
15609 return __builtin_mve_vornq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15612 __extension__
extern __inline uint16x8_t
15613 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15614 __arm_vornq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
15616 return __builtin_mve_vornq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15619 __extension__
extern __inline uint32x4_t
15620 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15621 __arm_vornq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
15623 return __builtin_mve_vornq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15626 __extension__
extern __inline int8x16_t
15627 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15628 __arm_vorrq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15630 return __builtin_mve_vorrq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15633 __extension__
extern __inline int16x8_t
15634 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15635 __arm_vorrq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15637 return __builtin_mve_vorrq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15640 __extension__
extern __inline int32x4_t
15641 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15642 __arm_vorrq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15644 return __builtin_mve_vorrq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15647 __extension__
extern __inline uint8x16_t
15648 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15649 __arm_vorrq_x_u8 (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
15651 return __builtin_mve_vorrq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15654 __extension__
extern __inline uint16x8_t
15655 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15656 __arm_vorrq_x_u16 (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
15658 return __builtin_mve_vorrq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15661 __extension__
extern __inline uint32x4_t
15662 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15663 __arm_vorrq_x_u32 (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
15665 return __builtin_mve_vorrq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15668 __extension__
extern __inline int8x16_t
15669 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15670 __arm_vrev16q_x_s8 (int8x16_t __a
, mve_pred16_t __p
)
15672 return __builtin_mve_vrev16q_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __p
);
15675 __extension__
extern __inline uint8x16_t
15676 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15677 __arm_vrev16q_x_u8 (uint8x16_t __a
, mve_pred16_t __p
)
15679 return __builtin_mve_vrev16q_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __p
);
15682 __extension__
extern __inline int8x16_t
15683 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15684 __arm_vrev32q_x_s8 (int8x16_t __a
, mve_pred16_t __p
)
15686 return __builtin_mve_vrev32q_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __p
);
15689 __extension__
extern __inline int16x8_t
15690 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15691 __arm_vrev32q_x_s16 (int16x8_t __a
, mve_pred16_t __p
)
15693 return __builtin_mve_vrev32q_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
15696 __extension__
extern __inline uint8x16_t
15697 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15698 __arm_vrev32q_x_u8 (uint8x16_t __a
, mve_pred16_t __p
)
15700 return __builtin_mve_vrev32q_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __p
);
15703 __extension__
extern __inline uint16x8_t
15704 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15705 __arm_vrev32q_x_u16 (uint16x8_t __a
, mve_pred16_t __p
)
15707 return __builtin_mve_vrev32q_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __p
);
15710 __extension__
extern __inline int8x16_t
15711 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15712 __arm_vrev64q_x_s8 (int8x16_t __a
, mve_pred16_t __p
)
15714 return __builtin_mve_vrev64q_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __p
);
15717 __extension__
extern __inline int16x8_t
15718 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15719 __arm_vrev64q_x_s16 (int16x8_t __a
, mve_pred16_t __p
)
15721 return __builtin_mve_vrev64q_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
15724 __extension__
extern __inline int32x4_t
15725 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15726 __arm_vrev64q_x_s32 (int32x4_t __a
, mve_pred16_t __p
)
15728 return __builtin_mve_vrev64q_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __p
);
15731 __extension__
extern __inline uint8x16_t
15732 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15733 __arm_vrev64q_x_u8 (uint8x16_t __a
, mve_pred16_t __p
)
15735 return __builtin_mve_vrev64q_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __p
);
15738 __extension__
extern __inline uint16x8_t
15739 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15740 __arm_vrev64q_x_u16 (uint16x8_t __a
, mve_pred16_t __p
)
15742 return __builtin_mve_vrev64q_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __p
);
15745 __extension__
extern __inline uint32x4_t
15746 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15747 __arm_vrev64q_x_u32 (uint32x4_t __a
, mve_pred16_t __p
)
15749 return __builtin_mve_vrev64q_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __p
);
15752 __extension__
extern __inline int8x16_t
15753 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15754 __arm_vrshlq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15756 return __builtin_mve_vrshlq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15759 __extension__
extern __inline int16x8_t
15760 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15761 __arm_vrshlq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15763 return __builtin_mve_vrshlq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15766 __extension__
extern __inline int32x4_t
15767 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15768 __arm_vrshlq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15770 return __builtin_mve_vrshlq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15773 __extension__
extern __inline uint8x16_t
15774 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15775 __arm_vrshlq_x_u8 (uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15777 return __builtin_mve_vrshlq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15780 __extension__
extern __inline uint16x8_t
15781 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15782 __arm_vrshlq_x_u16 (uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15784 return __builtin_mve_vrshlq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15787 __extension__
extern __inline uint32x4_t
15788 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15789 __arm_vrshlq_x_u32 (uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15791 return __builtin_mve_vrshlq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15794 __extension__
extern __inline int16x8_t
15795 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15796 __arm_vshllbq_x_n_s8 (int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
15798 return __builtin_mve_vshllbq_m_n_sv16qi (__arm_vuninitializedq_s16 (), __a
, __imm
, __p
);
15801 __extension__
extern __inline int32x4_t
15802 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15803 __arm_vshllbq_x_n_s16 (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
15805 return __builtin_mve_vshllbq_m_n_sv8hi (__arm_vuninitializedq_s32 (), __a
, __imm
, __p
);
15808 __extension__
extern __inline uint16x8_t
15809 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15810 __arm_vshllbq_x_n_u8 (uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
15812 return __builtin_mve_vshllbq_m_n_uv16qi (__arm_vuninitializedq_u16 (), __a
, __imm
, __p
);
15815 __extension__
extern __inline uint32x4_t
15816 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15817 __arm_vshllbq_x_n_u16 (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
15819 return __builtin_mve_vshllbq_m_n_uv8hi (__arm_vuninitializedq_u32 (), __a
, __imm
, __p
);
15822 __extension__
extern __inline int16x8_t
15823 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15824 __arm_vshlltq_x_n_s8 (int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
15826 return __builtin_mve_vshlltq_m_n_sv16qi (__arm_vuninitializedq_s16 (), __a
, __imm
, __p
);
15829 __extension__
extern __inline int32x4_t
15830 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15831 __arm_vshlltq_x_n_s16 (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
15833 return __builtin_mve_vshlltq_m_n_sv8hi (__arm_vuninitializedq_s32 (), __a
, __imm
, __p
);
15836 __extension__
extern __inline uint16x8_t
15837 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15838 __arm_vshlltq_x_n_u8 (uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
15840 return __builtin_mve_vshlltq_m_n_uv16qi (__arm_vuninitializedq_u16 (), __a
, __imm
, __p
);
15843 __extension__
extern __inline uint32x4_t
15844 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15845 __arm_vshlltq_x_n_u16 (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
15847 return __builtin_mve_vshlltq_m_n_uv8hi (__arm_vuninitializedq_u32 (), __a
, __imm
, __p
);
15850 __extension__
extern __inline int8x16_t
15851 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15852 __arm_vshlq_x_s8 (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15854 return __builtin_mve_vshlq_m_sv16qi (__arm_vuninitializedq_s8 (), __a
, __b
, __p
);
15857 __extension__
extern __inline int16x8_t
15858 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15859 __arm_vshlq_x_s16 (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15861 return __builtin_mve_vshlq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __b
, __p
);
15864 __extension__
extern __inline int32x4_t
15865 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15866 __arm_vshlq_x_s32 (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15868 return __builtin_mve_vshlq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __b
, __p
);
15871 __extension__
extern __inline uint8x16_t
15872 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15873 __arm_vshlq_x_u8 (uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
15875 return __builtin_mve_vshlq_m_uv16qi (__arm_vuninitializedq_u8 (), __a
, __b
, __p
);
15878 __extension__
extern __inline uint16x8_t
15879 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15880 __arm_vshlq_x_u16 (uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
15882 return __builtin_mve_vshlq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __b
, __p
);
15885 __extension__
extern __inline uint32x4_t
15886 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15887 __arm_vshlq_x_u32 (uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
15889 return __builtin_mve_vshlq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __b
, __p
);
15892 __extension__
extern __inline int8x16_t
15893 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15894 __arm_vshlq_x_n_s8 (int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
15896 return __builtin_mve_vshlq_m_n_sv16qi (__arm_vuninitializedq_s8 (), __a
, __imm
, __p
);
15899 __extension__
extern __inline int16x8_t
15900 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15901 __arm_vshlq_x_n_s16 (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
15903 return __builtin_mve_vshlq_m_n_sv8hi (__arm_vuninitializedq_s16 (), __a
, __imm
, __p
);
15906 __extension__
extern __inline int32x4_t
15907 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15908 __arm_vshlq_x_n_s32 (int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
15910 return __builtin_mve_vshlq_m_n_sv4si (__arm_vuninitializedq_s32 (), __a
, __imm
, __p
);
15913 __extension__
extern __inline uint8x16_t
15914 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15915 __arm_vshlq_x_n_u8 (uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
15917 return __builtin_mve_vshlq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __imm
, __p
);
15920 __extension__
extern __inline uint16x8_t
15921 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15922 __arm_vshlq_x_n_u16 (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
15924 return __builtin_mve_vshlq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __imm
, __p
);
15927 __extension__
extern __inline uint32x4_t
15928 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15929 __arm_vshlq_x_n_u32 (uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
15931 return __builtin_mve_vshlq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __imm
, __p
);
15934 __extension__
extern __inline int8x16_t
15935 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15936 __arm_vrshrq_x_n_s8 (int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
15938 return __builtin_mve_vrshrq_m_n_sv16qi (__arm_vuninitializedq_s8 (), __a
, __imm
, __p
);
15941 __extension__
extern __inline int16x8_t
15942 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15943 __arm_vrshrq_x_n_s16 (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
15945 return __builtin_mve_vrshrq_m_n_sv8hi (__arm_vuninitializedq_s16 (), __a
, __imm
, __p
);
15948 __extension__
extern __inline int32x4_t
15949 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15950 __arm_vrshrq_x_n_s32 (int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
15952 return __builtin_mve_vrshrq_m_n_sv4si (__arm_vuninitializedq_s32 (), __a
, __imm
, __p
);
15955 __extension__
extern __inline uint8x16_t
15956 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15957 __arm_vrshrq_x_n_u8 (uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
15959 return __builtin_mve_vrshrq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __imm
, __p
);
15962 __extension__
extern __inline uint16x8_t
15963 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15964 __arm_vrshrq_x_n_u16 (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
15966 return __builtin_mve_vrshrq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __imm
, __p
);
15969 __extension__
extern __inline uint32x4_t
15970 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15971 __arm_vrshrq_x_n_u32 (uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
15973 return __builtin_mve_vrshrq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __imm
, __p
);
15976 __extension__
extern __inline int8x16_t
15977 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15978 __arm_vshrq_x_n_s8 (int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
15980 return __builtin_mve_vshrq_m_n_sv16qi (__arm_vuninitializedq_s8 (), __a
, __imm
, __p
);
15983 __extension__
extern __inline int16x8_t
15984 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15985 __arm_vshrq_x_n_s16 (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
15987 return __builtin_mve_vshrq_m_n_sv8hi (__arm_vuninitializedq_s16 (), __a
, __imm
, __p
);
15990 __extension__
extern __inline int32x4_t
15991 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15992 __arm_vshrq_x_n_s32 (int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
15994 return __builtin_mve_vshrq_m_n_sv4si (__arm_vuninitializedq_s32 (), __a
, __imm
, __p
);
15997 __extension__
extern __inline uint8x16_t
15998 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
15999 __arm_vshrq_x_n_u8 (uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
16001 return __builtin_mve_vshrq_m_n_uv16qi (__arm_vuninitializedq_u8 (), __a
, __imm
, __p
);
16004 __extension__
extern __inline uint16x8_t
16005 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16006 __arm_vshrq_x_n_u16 (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
16008 return __builtin_mve_vshrq_m_n_uv8hi (__arm_vuninitializedq_u16 (), __a
, __imm
, __p
);
16011 __extension__
extern __inline uint32x4_t
16012 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16013 __arm_vshrq_x_n_u32 (uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
16015 return __builtin_mve_vshrq_m_n_uv4si (__arm_vuninitializedq_u32 (), __a
, __imm
, __p
);
16018 __extension__
extern __inline int32x4_t
16019 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16020 __arm_vadciq_s32 (int32x4_t __a
, int32x4_t __b
, unsigned * __carry_out
)
16022 int32x4_t __res
= __builtin_mve_vadciq_sv4si (__a
, __b
);
16023 *__carry_out
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16027 __extension__
extern __inline uint32x4_t
16028 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16029 __arm_vadciq_u32 (uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry_out
)
16031 uint32x4_t __res
= __builtin_mve_vadciq_uv4si (__a
, __b
);
16032 *__carry_out
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16036 __extension__
extern __inline int32x4_t
16037 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16038 __arm_vadciq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, unsigned * __carry_out
, mve_pred16_t __p
)
16040 int32x4_t __res
= __builtin_mve_vadciq_m_sv4si (__inactive
, __a
, __b
, __p
);
16041 *__carry_out
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16045 __extension__
extern __inline uint32x4_t
16046 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16047 __arm_vadciq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry_out
, mve_pred16_t __p
)
16049 uint32x4_t __res
= __builtin_mve_vadciq_m_uv4si (__inactive
, __a
, __b
, __p
);
16050 *__carry_out
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16054 __extension__
extern __inline int32x4_t
16055 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16056 __arm_vadcq_s32 (int32x4_t __a
, int32x4_t __b
, unsigned * __carry
)
16058 __builtin_arm_set_fpscr_nzcvqc((__builtin_arm_get_fpscr_nzcvqc () & ~0x20000000u
) | (*__carry
<< 29));
16059 int32x4_t __res
= __builtin_mve_vadcq_sv4si (__a
, __b
);
16060 *__carry
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16064 __extension__
extern __inline uint32x4_t
16065 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16066 __arm_vadcq_u32 (uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry
)
16068 __builtin_arm_set_fpscr_nzcvqc((__builtin_arm_get_fpscr_nzcvqc () & ~0x20000000u
) | (*__carry
<< 29));
16069 uint32x4_t __res
= __builtin_mve_vadcq_uv4si (__a
, __b
);
16070 *__carry
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16074 __extension__
extern __inline int32x4_t
16075 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16076 __arm_vadcq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, unsigned * __carry
, mve_pred16_t __p
)
16078 __builtin_arm_set_fpscr_nzcvqc((__builtin_arm_get_fpscr_nzcvqc () & ~0x20000000u
) | (*__carry
<< 29));
16079 int32x4_t __res
= __builtin_mve_vadcq_m_sv4si (__inactive
, __a
, __b
, __p
);
16080 *__carry
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16084 __extension__
extern __inline uint32x4_t
16085 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16086 __arm_vadcq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry
, mve_pred16_t __p
)
16088 __builtin_arm_set_fpscr_nzcvqc((__builtin_arm_get_fpscr_nzcvqc () & ~0x20000000u
) | (*__carry
<< 29));
16089 uint32x4_t __res
= __builtin_mve_vadcq_m_uv4si (__inactive
, __a
, __b
, __p
);
16090 *__carry
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16094 __extension__
extern __inline int32x4_t
16095 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16096 __arm_vsbciq_s32 (int32x4_t __a
, int32x4_t __b
, unsigned * __carry_out
)
16098 int32x4_t __res
= __builtin_mve_vsbciq_sv4si (__a
, __b
);
16099 *__carry_out
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16103 __extension__
extern __inline uint32x4_t
16104 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16105 __arm_vsbciq_u32 (uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry_out
)
16107 uint32x4_t __res
= __builtin_mve_vsbciq_uv4si (__a
, __b
);
16108 *__carry_out
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16112 __extension__
extern __inline int32x4_t
16113 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16114 __arm_vsbciq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, unsigned * __carry_out
, mve_pred16_t __p
)
16116 int32x4_t __res
= __builtin_mve_vsbciq_m_sv4si (__inactive
, __a
, __b
, __p
);
16117 *__carry_out
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16121 __extension__
extern __inline uint32x4_t
16122 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16123 __arm_vsbciq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry_out
, mve_pred16_t __p
)
16125 uint32x4_t __res
= __builtin_mve_vsbciq_m_uv4si (__inactive
, __a
, __b
, __p
);
16126 *__carry_out
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16130 __extension__
extern __inline int32x4_t
16131 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16132 __arm_vsbcq_s32 (int32x4_t __a
, int32x4_t __b
, unsigned * __carry
)
16134 __builtin_arm_set_fpscr_nzcvqc((__builtin_arm_get_fpscr_nzcvqc () & ~0x20000000u
) | (*__carry
<< 29));
16135 int32x4_t __res
= __builtin_mve_vsbcq_sv4si (__a
, __b
);
16136 *__carry
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16140 __extension__
extern __inline uint32x4_t
16141 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16142 __arm_vsbcq_u32 (uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry
)
16144 __builtin_arm_set_fpscr_nzcvqc((__builtin_arm_get_fpscr_nzcvqc () & ~0x20000000u
) | (*__carry
<< 29));
16145 uint32x4_t __res
= __builtin_mve_vsbcq_uv4si (__a
, __b
);
16146 *__carry
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16150 __extension__
extern __inline int32x4_t
16151 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16152 __arm_vsbcq_m_s32 (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, unsigned * __carry
, mve_pred16_t __p
)
16154 __builtin_arm_set_fpscr_nzcvqc((__builtin_arm_get_fpscr_nzcvqc () & ~0x20000000u
) | (*__carry
<< 29));
16155 int32x4_t __res
= __builtin_mve_vsbcq_m_sv4si (__inactive
, __a
, __b
, __p
);
16156 *__carry
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16160 __extension__
extern __inline uint32x4_t
16161 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16162 __arm_vsbcq_m_u32 (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry
, mve_pred16_t __p
)
16164 __builtin_arm_set_fpscr_nzcvqc((__builtin_arm_get_fpscr_nzcvqc () & ~0x20000000u
) | (*__carry
<< 29));
16165 uint32x4_t __res
= __builtin_mve_vsbcq_m_uv4si (__inactive
, __a
, __b
, __p
);
16166 *__carry
= (__builtin_arm_get_fpscr_nzcvqc () >> 29) & 0x1u
;
16170 __extension__
extern __inline
void
16171 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16172 __arm_vst1q_p_u8 (uint8_t * __addr
, uint8x16_t __value
, mve_pred16_t __p
)
16174 return vstrbq_p_u8 (__addr
, __value
, __p
);
16177 __extension__
extern __inline
void
16178 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16179 __arm_vst1q_p_s8 (int8_t * __addr
, int8x16_t __value
, mve_pred16_t __p
)
16181 return vstrbq_p_s8 (__addr
, __value
, __p
);
16184 __extension__
extern __inline
void
16185 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16186 __arm_vst2q_s8 (int8_t * __addr
, int8x16x2_t __value
)
16188 union { int8x16x2_t __i
; __builtin_neon_oi __o
; } __rv
;
16189 __rv
.__i
= __value
;
16190 __builtin_mve_vst2qv16qi ((__builtin_neon_qi
*) __addr
, __rv
.__o
);
16193 __extension__
extern __inline
void
16194 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16195 __arm_vst2q_u8 (uint8_t * __addr
, uint8x16x2_t __value
)
16197 union { uint8x16x2_t __i
; __builtin_neon_oi __o
; } __rv
;
16198 __rv
.__i
= __value
;
16199 __builtin_mve_vst2qv16qi ((__builtin_neon_qi
*) __addr
, __rv
.__o
);
16202 __extension__
extern __inline uint8x16_t
16203 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16204 __arm_vld1q_z_u8 (uint8_t const *__base
, mve_pred16_t __p
)
16206 return vldrbq_z_u8 ( __base
, __p
);
16209 __extension__
extern __inline int8x16_t
16210 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16211 __arm_vld1q_z_s8 (int8_t const *__base
, mve_pred16_t __p
)
16213 return vldrbq_z_s8 ( __base
, __p
);
16216 __extension__
extern __inline int8x16x2_t
16217 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16218 __arm_vld2q_s8 (int8_t const * __addr
)
16220 union { int8x16x2_t __i
; __builtin_neon_oi __o
; } __rv
;
16221 __rv
.__o
= __builtin_mve_vld2qv16qi ((__builtin_neon_qi
*) __addr
);
16225 __extension__
extern __inline uint8x16x2_t
16226 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16227 __arm_vld2q_u8 (uint8_t const * __addr
)
16229 union { uint8x16x2_t __i
; __builtin_neon_oi __o
; } __rv
;
16230 __rv
.__o
= __builtin_mve_vld2qv16qi ((__builtin_neon_qi
*) __addr
);
16234 __extension__
extern __inline int8x16x4_t
16235 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16236 __arm_vld4q_s8 (int8_t const * __addr
)
16238 union { int8x16x4_t __i
; __builtin_neon_xi __o
; } __rv
;
16239 __rv
.__o
= __builtin_mve_vld4qv16qi ((__builtin_neon_qi
*) __addr
);
16243 __extension__
extern __inline uint8x16x4_t
16244 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16245 __arm_vld4q_u8 (uint8_t const * __addr
)
16247 union { uint8x16x4_t __i
; __builtin_neon_xi __o
; } __rv
;
16248 __rv
.__o
= __builtin_mve_vld4qv16qi ((__builtin_neon_qi
*) __addr
);
16252 __extension__
extern __inline
void
16253 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16254 __arm_vst1q_p_u16 (uint16_t * __addr
, uint16x8_t __value
, mve_pred16_t __p
)
16256 return vstrhq_p_u16 (__addr
, __value
, __p
);
16259 __extension__
extern __inline
void
16260 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16261 __arm_vst1q_p_s16 (int16_t * __addr
, int16x8_t __value
, mve_pred16_t __p
)
16263 return vstrhq_p_s16 (__addr
, __value
, __p
);
16266 __extension__
extern __inline
void
16267 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16268 __arm_vst2q_s16 (int16_t * __addr
, int16x8x2_t __value
)
16270 union { int16x8x2_t __i
; __builtin_neon_oi __o
; } __rv
;
16271 __rv
.__i
= __value
;
16272 __builtin_mve_vst2qv8hi ((__builtin_neon_hi
*) __addr
, __rv
.__o
);
16275 __extension__
extern __inline
void
16276 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16277 __arm_vst2q_u16 (uint16_t * __addr
, uint16x8x2_t __value
)
16279 union { uint16x8x2_t __i
; __builtin_neon_oi __o
; } __rv
;
16280 __rv
.__i
= __value
;
16281 __builtin_mve_vst2qv8hi ((__builtin_neon_hi
*) __addr
, __rv
.__o
);
16284 __extension__
extern __inline uint16x8_t
16285 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16286 __arm_vld1q_z_u16 (uint16_t const *__base
, mve_pred16_t __p
)
16288 return vldrhq_z_u16 ( __base
, __p
);
16291 __extension__
extern __inline int16x8_t
16292 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16293 __arm_vld1q_z_s16 (int16_t const *__base
, mve_pred16_t __p
)
16295 return vldrhq_z_s16 ( __base
, __p
);
16298 __extension__
extern __inline int16x8x2_t
16299 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16300 __arm_vld2q_s16 (int16_t const * __addr
)
16302 union { int16x8x2_t __i
; __builtin_neon_oi __o
; } __rv
;
16303 __rv
.__o
= __builtin_mve_vld2qv8hi ((__builtin_neon_hi
*) __addr
);
16307 __extension__
extern __inline uint16x8x2_t
16308 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16309 __arm_vld2q_u16 (uint16_t const * __addr
)
16311 union { uint16x8x2_t __i
; __builtin_neon_oi __o
; } __rv
;
16312 __rv
.__o
= __builtin_mve_vld2qv8hi ((__builtin_neon_hi
*) __addr
);
16316 __extension__
extern __inline int16x8x4_t
16317 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16318 __arm_vld4q_s16 (int16_t const * __addr
)
16320 union { int16x8x4_t __i
; __builtin_neon_xi __o
; } __rv
;
16321 __rv
.__o
= __builtin_mve_vld4qv8hi ((__builtin_neon_hi
*) __addr
);
16325 __extension__
extern __inline uint16x8x4_t
16326 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16327 __arm_vld4q_u16 (uint16_t const * __addr
)
16329 union { uint16x8x4_t __i
; __builtin_neon_xi __o
; } __rv
;
16330 __rv
.__o
= __builtin_mve_vld4qv8hi ((__builtin_neon_hi
*) __addr
);
16334 __extension__
extern __inline
void
16335 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16336 __arm_vst1q_p_u32 (uint32_t * __addr
, uint32x4_t __value
, mve_pred16_t __p
)
16338 return vstrwq_p_u32 (__addr
, __value
, __p
);
16341 __extension__
extern __inline
void
16342 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16343 __arm_vst1q_p_s32 (int32_t * __addr
, int32x4_t __value
, mve_pred16_t __p
)
16345 return vstrwq_p_s32 (__addr
, __value
, __p
);
16348 __extension__
extern __inline
void
16349 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16350 __arm_vst2q_s32 (int32_t * __addr
, int32x4x2_t __value
)
16352 union { int32x4x2_t __i
; __builtin_neon_oi __o
; } __rv
;
16353 __rv
.__i
= __value
;
16354 __builtin_mve_vst2qv4si ((__builtin_neon_si
*) __addr
, __rv
.__o
);
16357 __extension__
extern __inline
void
16358 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16359 __arm_vst2q_u32 (uint32_t * __addr
, uint32x4x2_t __value
)
16361 union { uint32x4x2_t __i
; __builtin_neon_oi __o
; } __rv
;
16362 __rv
.__i
= __value
;
16363 __builtin_mve_vst2qv4si ((__builtin_neon_si
*) __addr
, __rv
.__o
);
16366 __extension__
extern __inline uint32x4_t
16367 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16368 __arm_vld1q_z_u32 (uint32_t const *__base
, mve_pred16_t __p
)
16370 return vldrwq_z_u32 ( __base
, __p
);
16373 __extension__
extern __inline int32x4_t
16374 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16375 __arm_vld1q_z_s32 (int32_t const *__base
, mve_pred16_t __p
)
16377 return vldrwq_z_s32 ( __base
, __p
);
16380 __extension__
extern __inline int32x4x2_t
16381 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16382 __arm_vld2q_s32 (int32_t const * __addr
)
16384 union { int32x4x2_t __i
; __builtin_neon_oi __o
; } __rv
;
16385 __rv
.__o
= __builtin_mve_vld2qv4si ((__builtin_neon_si
*) __addr
);
16389 __extension__
extern __inline uint32x4x2_t
16390 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16391 __arm_vld2q_u32 (uint32_t const * __addr
)
16393 union { uint32x4x2_t __i
; __builtin_neon_oi __o
; } __rv
;
16394 __rv
.__o
= __builtin_mve_vld2qv4si ((__builtin_neon_si
*) __addr
);
16398 __extension__
extern __inline int32x4x4_t
16399 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16400 __arm_vld4q_s32 (int32_t const * __addr
)
16402 union { int32x4x4_t __i
; __builtin_neon_xi __o
; } __rv
;
16403 __rv
.__o
= __builtin_mve_vld4qv4si ((__builtin_neon_si
*) __addr
);
16407 __extension__
extern __inline uint32x4x4_t
16408 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16409 __arm_vld4q_u32 (uint32_t const * __addr
)
16411 union { uint32x4x4_t __i
; __builtin_neon_xi __o
; } __rv
;
16412 __rv
.__o
= __builtin_mve_vld4qv4si ((__builtin_neon_si
*) __addr
);
16416 __extension__
extern __inline int16x8_t
16417 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16418 __arm_vsetq_lane_s16 (int16_t __a
, int16x8_t __b
, const int __idx
)
16420 __ARM_CHECK_LANEQ (__b
, __idx
);
16421 __b
[__ARM_LANEQ(__b
,__idx
)] = __a
;
16425 __extension__
extern __inline int32x4_t
16426 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16427 __arm_vsetq_lane_s32 (int32_t __a
, int32x4_t __b
, const int __idx
)
16429 __ARM_CHECK_LANEQ (__b
, __idx
);
16430 __b
[__ARM_LANEQ(__b
,__idx
)] = __a
;
16434 __extension__
extern __inline int8x16_t
16435 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16436 __arm_vsetq_lane_s8 (int8_t __a
, int8x16_t __b
, const int __idx
)
16438 __ARM_CHECK_LANEQ (__b
, __idx
);
16439 __b
[__ARM_LANEQ(__b
,__idx
)] = __a
;
16443 __extension__
extern __inline int64x2_t
16444 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16445 __arm_vsetq_lane_s64 (int64_t __a
, int64x2_t __b
, const int __idx
)
16447 __ARM_CHECK_LANEQ (__b
, __idx
);
16448 __b
[__ARM_LANEQ(__b
,__idx
)] = __a
;
16452 __extension__
extern __inline uint8x16_t
16453 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16454 __arm_vsetq_lane_u8 (uint8_t __a
, uint8x16_t __b
, const int __idx
)
16456 __ARM_CHECK_LANEQ (__b
, __idx
);
16457 __b
[__ARM_LANEQ(__b
,__idx
)] = __a
;
16461 __extension__
extern __inline uint16x8_t
16462 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16463 __arm_vsetq_lane_u16 (uint16_t __a
, uint16x8_t __b
, const int __idx
)
16465 __ARM_CHECK_LANEQ (__b
, __idx
);
16466 __b
[__ARM_LANEQ(__b
,__idx
)] = __a
;
16470 __extension__
extern __inline uint32x4_t
16471 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16472 __arm_vsetq_lane_u32 (uint32_t __a
, uint32x4_t __b
, const int __idx
)
16474 __ARM_CHECK_LANEQ (__b
, __idx
);
16475 __b
[__ARM_LANEQ(__b
,__idx
)] = __a
;
16479 __extension__
extern __inline uint64x2_t
16480 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16481 __arm_vsetq_lane_u64 (uint64_t __a
, uint64x2_t __b
, const int __idx
)
16483 __ARM_CHECK_LANEQ (__b
, __idx
);
16484 __b
[__ARM_LANEQ(__b
,__idx
)] = __a
;
16488 __extension__
extern __inline
int16_t
16489 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16490 __arm_vgetq_lane_s16 (int16x8_t __a
, const int __idx
)
16492 __ARM_CHECK_LANEQ (__a
, __idx
);
16493 return __a
[__ARM_LANEQ(__a
,__idx
)];
16496 __extension__
extern __inline
int32_t
16497 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16498 __arm_vgetq_lane_s32 (int32x4_t __a
, const int __idx
)
16500 __ARM_CHECK_LANEQ (__a
, __idx
);
16501 return __a
[__ARM_LANEQ(__a
,__idx
)];
16504 __extension__
extern __inline
int8_t
16505 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16506 __arm_vgetq_lane_s8 (int8x16_t __a
, const int __idx
)
16508 __ARM_CHECK_LANEQ (__a
, __idx
);
16509 return __a
[__ARM_LANEQ(__a
,__idx
)];
16512 __extension__
extern __inline
int64_t
16513 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16514 __arm_vgetq_lane_s64 (int64x2_t __a
, const int __idx
)
16516 __ARM_CHECK_LANEQ (__a
, __idx
);
16517 return __a
[__ARM_LANEQ(__a
,__idx
)];
16520 __extension__
extern __inline
uint8_t
16521 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16522 __arm_vgetq_lane_u8 (uint8x16_t __a
, const int __idx
)
16524 __ARM_CHECK_LANEQ (__a
, __idx
);
16525 return __a
[__ARM_LANEQ(__a
,__idx
)];
16528 __extension__
extern __inline
uint16_t
16529 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16530 __arm_vgetq_lane_u16 (uint16x8_t __a
, const int __idx
)
16532 __ARM_CHECK_LANEQ (__a
, __idx
);
16533 return __a
[__ARM_LANEQ(__a
,__idx
)];
16536 __extension__
extern __inline
uint32_t
16537 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16538 __arm_vgetq_lane_u32 (uint32x4_t __a
, const int __idx
)
16540 __ARM_CHECK_LANEQ (__a
, __idx
);
16541 return __a
[__ARM_LANEQ(__a
,__idx
)];
16544 __extension__
extern __inline
uint64_t
16545 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16546 __arm_vgetq_lane_u64 (uint64x2_t __a
, const int __idx
)
16548 __ARM_CHECK_LANEQ (__a
, __idx
);
16549 return __a
[__ARM_LANEQ(__a
,__idx
)];
16552 __extension__
extern __inline
uint64_t
16553 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16554 __arm_lsll (uint64_t value
, int32_t shift
)
16556 return (value
<< shift
);
16559 __extension__
extern __inline
int64_t
16560 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16561 __arm_asrl (int64_t value
, int32_t shift
)
16563 return (value
>> shift
);
16566 __extension__
extern __inline
uint64_t
16567 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16568 __arm_uqrshll (uint64_t value
, int32_t shift
)
16570 return __builtin_mve_uqrshll_sat64_di (value
, shift
);
16573 __extension__
extern __inline
uint64_t
16574 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16575 __arm_uqrshll_sat48 (uint64_t value
, int32_t shift
)
16577 return __builtin_mve_uqrshll_sat48_di (value
, shift
);
16580 __extension__
extern __inline
int64_t
16581 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16582 __arm_sqrshrl (int64_t value
, int32_t shift
)
16584 return __builtin_mve_sqrshrl_sat64_di (value
, shift
);
16587 __extension__
extern __inline
int64_t
16588 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16589 __arm_sqrshrl_sat48 (int64_t value
, int32_t shift
)
16591 return __builtin_mve_sqrshrl_sat48_di (value
, shift
);
16594 __extension__
extern __inline
uint64_t
16595 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16596 __arm_uqshll (uint64_t value
, const int shift
)
16598 return __builtin_mve_uqshll_di (value
, shift
);
16601 __extension__
extern __inline
uint64_t
16602 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16603 __arm_urshrl (uint64_t value
, const int shift
)
16605 return __builtin_mve_urshrl_di (value
, shift
);
16608 __extension__
extern __inline
int64_t
16609 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16610 __arm_srshrl (int64_t value
, const int shift
)
16612 return __builtin_mve_srshrl_di (value
, shift
);
16615 __extension__
extern __inline
int64_t
16616 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16617 __arm_sqshll (int64_t value
, const int shift
)
16619 return __builtin_mve_sqshll_di (value
, shift
);
16622 __extension__
extern __inline
uint32_t
16623 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16624 __arm_uqrshl (uint32_t value
, int32_t shift
)
16626 return __builtin_mve_uqrshl_si (value
, shift
);
16629 __extension__
extern __inline
int32_t
16630 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16631 __arm_sqrshr (int32_t value
, int32_t shift
)
16633 return __builtin_mve_sqrshr_si (value
, shift
);
16636 __extension__
extern __inline
uint32_t
16637 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16638 __arm_uqshl (uint32_t value
, const int shift
)
16640 return __builtin_mve_uqshl_si (value
, shift
);
16643 __extension__
extern __inline
uint32_t
16644 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16645 __arm_urshr (uint32_t value
, const int shift
)
16647 return __builtin_mve_urshr_si (value
, shift
);
16650 __extension__
extern __inline
int32_t
16651 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16652 __arm_sqshl (int32_t value
, const int shift
)
16654 return __builtin_mve_sqshl_si (value
, shift
);
16657 __extension__
extern __inline
int32_t
16658 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16659 __arm_srshr (int32_t value
, const int shift
)
16661 return __builtin_mve_srshr_si (value
, shift
);
16664 __extension__
extern __inline int8x16_t
16665 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16666 __arm_vshlcq_m_s8 (int8x16_t __a
, uint32_t * __b
, const int __imm
, mve_pred16_t __p
)
16668 int8x16_t __res
= __builtin_mve_vshlcq_m_vec_sv16qi (__a
, *__b
, __imm
, __p
);
16669 *__b
= __builtin_mve_vshlcq_m_carry_sv16qi (__a
, *__b
, __imm
, __p
);
16673 __extension__
extern __inline uint8x16_t
16674 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16675 __arm_vshlcq_m_u8 (uint8x16_t __a
, uint32_t * __b
, const int __imm
, mve_pred16_t __p
)
16677 uint8x16_t __res
= __builtin_mve_vshlcq_m_vec_uv16qi (__a
, *__b
, __imm
, __p
);
16678 *__b
= __builtin_mve_vshlcq_m_carry_uv16qi (__a
, *__b
, __imm
, __p
);
16682 __extension__
extern __inline int16x8_t
16683 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16684 __arm_vshlcq_m_s16 (int16x8_t __a
, uint32_t * __b
, const int __imm
, mve_pred16_t __p
)
16686 int16x8_t __res
= __builtin_mve_vshlcq_m_vec_sv8hi (__a
, *__b
, __imm
, __p
);
16687 *__b
= __builtin_mve_vshlcq_m_carry_sv8hi (__a
, *__b
, __imm
, __p
);
16691 __extension__
extern __inline uint16x8_t
16692 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16693 __arm_vshlcq_m_u16 (uint16x8_t __a
, uint32_t * __b
, const int __imm
, mve_pred16_t __p
)
16695 uint16x8_t __res
= __builtin_mve_vshlcq_m_vec_uv8hi (__a
, *__b
, __imm
, __p
);
16696 *__b
= __builtin_mve_vshlcq_m_carry_uv8hi (__a
, *__b
, __imm
, __p
);
16700 __extension__
extern __inline int32x4_t
16701 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16702 __arm_vshlcq_m_s32 (int32x4_t __a
, uint32_t * __b
, const int __imm
, mve_pred16_t __p
)
16704 int32x4_t __res
= __builtin_mve_vshlcq_m_vec_sv4si (__a
, *__b
, __imm
, __p
);
16705 *__b
= __builtin_mve_vshlcq_m_carry_sv4si (__a
, *__b
, __imm
, __p
);
16709 __extension__
extern __inline uint32x4_t
16710 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16711 __arm_vshlcq_m_u32 (uint32x4_t __a
, uint32_t * __b
, const int __imm
, mve_pred16_t __p
)
16713 uint32x4_t __res
= __builtin_mve_vshlcq_m_vec_uv4si (__a
, *__b
, __imm
, __p
);
16714 *__b
= __builtin_mve_vshlcq_m_carry_uv4si (__a
, *__b
, __imm
, __p
);
16718 #if (__ARM_FEATURE_MVE & 2) /* MVE Floating point. */
16720 __extension__
extern __inline
void
16721 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16722 __arm_vst4q_f16 (float16_t
* __addr
, float16x8x4_t __value
)
16724 union { float16x8x4_t __i
; __builtin_neon_xi __o
; } __rv
;
16725 __rv
.__i
= __value
;
16726 __builtin_mve_vst4qv8hf (__addr
, __rv
.__o
);
16729 __extension__
extern __inline
void
16730 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16731 __arm_vst4q_f32 (float32_t
* __addr
, float32x4x4_t __value
)
16733 union { float32x4x4_t __i
; __builtin_neon_xi __o
; } __rv
;
16734 __rv
.__i
= __value
;
16735 __builtin_mve_vst4qv4sf (__addr
, __rv
.__o
);
16738 __extension__
extern __inline float16x8_t
16739 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16740 __arm_vrndxq_f16 (float16x8_t __a
)
16742 return __builtin_mve_vrndxq_fv8hf (__a
);
16745 __extension__
extern __inline float32x4_t
16746 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16747 __arm_vrndxq_f32 (float32x4_t __a
)
16749 return __builtin_mve_vrndxq_fv4sf (__a
);
16752 __extension__
extern __inline float16x8_t
16753 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16754 __arm_vrndq_f16 (float16x8_t __a
)
16756 return __builtin_mve_vrndq_fv8hf (__a
);
16759 __extension__
extern __inline float32x4_t
16760 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16761 __arm_vrndq_f32 (float32x4_t __a
)
16763 return __builtin_mve_vrndq_fv4sf (__a
);
16766 __extension__
extern __inline float16x8_t
16767 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16768 __arm_vrndpq_f16 (float16x8_t __a
)
16770 return __builtin_mve_vrndpq_fv8hf (__a
);
16773 __extension__
extern __inline float32x4_t
16774 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16775 __arm_vrndpq_f32 (float32x4_t __a
)
16777 return __builtin_mve_vrndpq_fv4sf (__a
);
16780 __extension__
extern __inline float16x8_t
16781 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16782 __arm_vrndnq_f16 (float16x8_t __a
)
16784 return __builtin_mve_vrndnq_fv8hf (__a
);
16787 __extension__
extern __inline float32x4_t
16788 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16789 __arm_vrndnq_f32 (float32x4_t __a
)
16791 return __builtin_mve_vrndnq_fv4sf (__a
);
16794 __extension__
extern __inline float16x8_t
16795 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16796 __arm_vrndmq_f16 (float16x8_t __a
)
16798 return __builtin_mve_vrndmq_fv8hf (__a
);
16801 __extension__
extern __inline float32x4_t
16802 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16803 __arm_vrndmq_f32 (float32x4_t __a
)
16805 return __builtin_mve_vrndmq_fv4sf (__a
);
16808 __extension__
extern __inline float16x8_t
16809 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16810 __arm_vrndaq_f16 (float16x8_t __a
)
16812 return __builtin_mve_vrndaq_fv8hf (__a
);
16815 __extension__
extern __inline float32x4_t
16816 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16817 __arm_vrndaq_f32 (float32x4_t __a
)
16819 return __builtin_mve_vrndaq_fv4sf (__a
);
16822 __extension__
extern __inline float16x8_t
16823 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16824 __arm_vrev64q_f16 (float16x8_t __a
)
16826 return __builtin_mve_vrev64q_fv8hf (__a
);
16829 __extension__
extern __inline float32x4_t
16830 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16831 __arm_vrev64q_f32 (float32x4_t __a
)
16833 return __builtin_mve_vrev64q_fv4sf (__a
);
16836 __extension__
extern __inline float16x8_t
16837 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16838 __arm_vnegq_f16 (float16x8_t __a
)
16840 return __builtin_mve_vnegq_fv8hf (__a
);
16843 __extension__
extern __inline float32x4_t
16844 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16845 __arm_vnegq_f32 (float32x4_t __a
)
16847 return __builtin_mve_vnegq_fv4sf (__a
);
16850 __extension__
extern __inline float16x8_t
16851 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16852 __arm_vdupq_n_f16 (float16_t __a
)
16854 return __builtin_mve_vdupq_n_fv8hf (__a
);
16857 __extension__
extern __inline float32x4_t
16858 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16859 __arm_vdupq_n_f32 (float32_t __a
)
16861 return __builtin_mve_vdupq_n_fv4sf (__a
);
16864 __extension__
extern __inline float16x8_t
16865 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16866 __arm_vabsq_f16 (float16x8_t __a
)
16868 return __builtin_mve_vabsq_fv8hf (__a
);
16871 __extension__
extern __inline float32x4_t
16872 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16873 __arm_vabsq_f32 (float32x4_t __a
)
16875 return __builtin_mve_vabsq_fv4sf (__a
);
16878 __extension__
extern __inline float16x8_t
16879 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16880 __arm_vrev32q_f16 (float16x8_t __a
)
16882 return __builtin_mve_vrev32q_fv8hf (__a
);
16885 __extension__
extern __inline float32x4_t
16886 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16887 __arm_vcvttq_f32_f16 (float16x8_t __a
)
16889 return __builtin_mve_vcvttq_f32_f16v4sf (__a
);
16892 __extension__
extern __inline float32x4_t
16893 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16894 __arm_vcvtbq_f32_f16 (float16x8_t __a
)
16896 return __builtin_mve_vcvtbq_f32_f16v4sf (__a
);
16899 __extension__
extern __inline float16x8_t
16900 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16901 __arm_vcvtq_f16_s16 (int16x8_t __a
)
16903 return __builtin_mve_vcvtq_to_f_sv8hf (__a
);
16906 __extension__
extern __inline float32x4_t
16907 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16908 __arm_vcvtq_f32_s32 (int32x4_t __a
)
16910 return __builtin_mve_vcvtq_to_f_sv4sf (__a
);
16913 __extension__
extern __inline float16x8_t
16914 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16915 __arm_vcvtq_f16_u16 (uint16x8_t __a
)
16917 return __builtin_mve_vcvtq_to_f_uv8hf (__a
);
16920 __extension__
extern __inline float32x4_t
16921 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16922 __arm_vcvtq_f32_u32 (uint32x4_t __a
)
16924 return __builtin_mve_vcvtq_to_f_uv4sf (__a
);
16927 __extension__
extern __inline int16x8_t
16928 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16929 __arm_vcvtq_s16_f16 (float16x8_t __a
)
16931 return __builtin_mve_vcvtq_from_f_sv8hi (__a
);
16934 __extension__
extern __inline int32x4_t
16935 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16936 __arm_vcvtq_s32_f32 (float32x4_t __a
)
16938 return __builtin_mve_vcvtq_from_f_sv4si (__a
);
16941 __extension__
extern __inline uint16x8_t
16942 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16943 __arm_vcvtq_u16_f16 (float16x8_t __a
)
16945 return __builtin_mve_vcvtq_from_f_uv8hi (__a
);
16948 __extension__
extern __inline uint32x4_t
16949 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16950 __arm_vcvtq_u32_f32 (float32x4_t __a
)
16952 return __builtin_mve_vcvtq_from_f_uv4si (__a
);
16955 __extension__
extern __inline uint16x8_t
16956 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16957 __arm_vcvtpq_u16_f16 (float16x8_t __a
)
16959 return __builtin_mve_vcvtpq_uv8hi (__a
);
16962 __extension__
extern __inline uint32x4_t
16963 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16964 __arm_vcvtpq_u32_f32 (float32x4_t __a
)
16966 return __builtin_mve_vcvtpq_uv4si (__a
);
16969 __extension__
extern __inline uint16x8_t
16970 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16971 __arm_vcvtnq_u16_f16 (float16x8_t __a
)
16973 return __builtin_mve_vcvtnq_uv8hi (__a
);
16976 __extension__
extern __inline uint32x4_t
16977 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16978 __arm_vcvtnq_u32_f32 (float32x4_t __a
)
16980 return __builtin_mve_vcvtnq_uv4si (__a
);
16983 __extension__
extern __inline uint16x8_t
16984 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16985 __arm_vcvtmq_u16_f16 (float16x8_t __a
)
16987 return __builtin_mve_vcvtmq_uv8hi (__a
);
16990 __extension__
extern __inline uint32x4_t
16991 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16992 __arm_vcvtmq_u32_f32 (float32x4_t __a
)
16994 return __builtin_mve_vcvtmq_uv4si (__a
);
16997 __extension__
extern __inline uint16x8_t
16998 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
16999 __arm_vcvtaq_u16_f16 (float16x8_t __a
)
17001 return __builtin_mve_vcvtaq_uv8hi (__a
);
17004 __extension__
extern __inline uint32x4_t
17005 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17006 __arm_vcvtaq_u32_f32 (float32x4_t __a
)
17008 return __builtin_mve_vcvtaq_uv4si (__a
);
17011 __extension__
extern __inline int16x8_t
17012 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17013 __arm_vcvtaq_s16_f16 (float16x8_t __a
)
17015 return __builtin_mve_vcvtaq_sv8hi (__a
);
17018 __extension__
extern __inline int32x4_t
17019 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17020 __arm_vcvtaq_s32_f32 (float32x4_t __a
)
17022 return __builtin_mve_vcvtaq_sv4si (__a
);
17025 __extension__
extern __inline int16x8_t
17026 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17027 __arm_vcvtnq_s16_f16 (float16x8_t __a
)
17029 return __builtin_mve_vcvtnq_sv8hi (__a
);
17032 __extension__
extern __inline int32x4_t
17033 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17034 __arm_vcvtnq_s32_f32 (float32x4_t __a
)
17036 return __builtin_mve_vcvtnq_sv4si (__a
);
17039 __extension__
extern __inline int16x8_t
17040 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17041 __arm_vcvtpq_s16_f16 (float16x8_t __a
)
17043 return __builtin_mve_vcvtpq_sv8hi (__a
);
17046 __extension__
extern __inline int32x4_t
17047 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17048 __arm_vcvtpq_s32_f32 (float32x4_t __a
)
17050 return __builtin_mve_vcvtpq_sv4si (__a
);
17053 __extension__
extern __inline int16x8_t
17054 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17055 __arm_vcvtmq_s16_f16 (float16x8_t __a
)
17057 return __builtin_mve_vcvtmq_sv8hi (__a
);
17060 __extension__
extern __inline int32x4_t
17061 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17062 __arm_vcvtmq_s32_f32 (float32x4_t __a
)
17064 return __builtin_mve_vcvtmq_sv4si (__a
);
17067 __extension__
extern __inline float16x8_t
17068 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17069 __arm_vsubq_n_f16 (float16x8_t __a
, float16_t __b
)
17071 return __builtin_mve_vsubq_n_fv8hf (__a
, __b
);
17074 __extension__
extern __inline float32x4_t
17075 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17076 __arm_vsubq_n_f32 (float32x4_t __a
, float32_t __b
)
17078 return __builtin_mve_vsubq_n_fv4sf (__a
, __b
);
17081 __extension__
extern __inline float16x8_t
17082 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17083 __arm_vbrsrq_n_f16 (float16x8_t __a
, int32_t __b
)
17085 return __builtin_mve_vbrsrq_n_fv8hf (__a
, __b
);
17088 __extension__
extern __inline float32x4_t
17089 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17090 __arm_vbrsrq_n_f32 (float32x4_t __a
, int32_t __b
)
17092 return __builtin_mve_vbrsrq_n_fv4sf (__a
, __b
);
17095 __extension__
extern __inline float16x8_t
17096 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17097 __arm_vcvtq_n_f16_s16 (int16x8_t __a
, const int __imm6
)
17099 return __builtin_mve_vcvtq_n_to_f_sv8hf (__a
, __imm6
);
17102 __extension__
extern __inline float32x4_t
17103 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17104 __arm_vcvtq_n_f32_s32 (int32x4_t __a
, const int __imm6
)
17106 return __builtin_mve_vcvtq_n_to_f_sv4sf (__a
, __imm6
);
17109 __extension__
extern __inline float16x8_t
17110 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17111 __arm_vcvtq_n_f16_u16 (uint16x8_t __a
, const int __imm6
)
17113 return __builtin_mve_vcvtq_n_to_f_uv8hf (__a
, __imm6
);
17116 __extension__
extern __inline float32x4_t
17117 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17118 __arm_vcvtq_n_f32_u32 (uint32x4_t __a
, const int __imm6
)
17120 return __builtin_mve_vcvtq_n_to_f_uv4sf (__a
, __imm6
);
17123 __extension__
extern __inline float16x8_t
17124 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17125 __arm_vcreateq_f16 (uint64_t __a
, uint64_t __b
)
17127 return __builtin_mve_vcreateq_fv8hf (__a
, __b
);
17130 __extension__
extern __inline float32x4_t
17131 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17132 __arm_vcreateq_f32 (uint64_t __a
, uint64_t __b
)
17134 return __builtin_mve_vcreateq_fv4sf (__a
, __b
);
17137 __extension__
extern __inline int16x8_t
17138 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17139 __arm_vcvtq_n_s16_f16 (float16x8_t __a
, const int __imm6
)
17141 return __builtin_mve_vcvtq_n_from_f_sv8hi (__a
, __imm6
);
17144 __extension__
extern __inline int32x4_t
17145 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17146 __arm_vcvtq_n_s32_f32 (float32x4_t __a
, const int __imm6
)
17148 return __builtin_mve_vcvtq_n_from_f_sv4si (__a
, __imm6
);
17151 __extension__
extern __inline uint16x8_t
17152 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17153 __arm_vcvtq_n_u16_f16 (float16x8_t __a
, const int __imm6
)
17155 return __builtin_mve_vcvtq_n_from_f_uv8hi (__a
, __imm6
);
17158 __extension__
extern __inline uint32x4_t
17159 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17160 __arm_vcvtq_n_u32_f32 (float32x4_t __a
, const int __imm6
)
17162 return __builtin_mve_vcvtq_n_from_f_uv4si (__a
, __imm6
);
17165 __extension__
extern __inline mve_pred16_t
17166 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17167 __arm_vcmpneq_n_f16 (float16x8_t __a
, float16_t __b
)
17169 return __builtin_mve_vcmpneq_n_fv8hf (__a
, __b
);
17172 __extension__
extern __inline mve_pred16_t
17173 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17174 __arm_vcmpneq_f16 (float16x8_t __a
, float16x8_t __b
)
17176 return __builtin_mve_vcmpneq_fv8hf (__a
, __b
);
17179 __extension__
extern __inline mve_pred16_t
17180 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17181 __arm_vcmpltq_n_f16 (float16x8_t __a
, float16_t __b
)
17183 return __builtin_mve_vcmpltq_n_fv8hf (__a
, __b
);
17186 __extension__
extern __inline mve_pred16_t
17187 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17188 __arm_vcmpltq_f16 (float16x8_t __a
, float16x8_t __b
)
17190 return __builtin_mve_vcmpltq_fv8hf (__a
, __b
);
17193 __extension__
extern __inline mve_pred16_t
17194 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17195 __arm_vcmpleq_n_f16 (float16x8_t __a
, float16_t __b
)
17197 return __builtin_mve_vcmpleq_n_fv8hf (__a
, __b
);
17200 __extension__
extern __inline mve_pred16_t
17201 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17202 __arm_vcmpleq_f16 (float16x8_t __a
, float16x8_t __b
)
17204 return __builtin_mve_vcmpleq_fv8hf (__a
, __b
);
17207 __extension__
extern __inline mve_pred16_t
17208 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17209 __arm_vcmpgtq_n_f16 (float16x8_t __a
, float16_t __b
)
17211 return __builtin_mve_vcmpgtq_n_fv8hf (__a
, __b
);
17214 __extension__
extern __inline mve_pred16_t
17215 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17216 __arm_vcmpgtq_f16 (float16x8_t __a
, float16x8_t __b
)
17218 return __builtin_mve_vcmpgtq_fv8hf (__a
, __b
);
17221 __extension__
extern __inline mve_pred16_t
17222 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17223 __arm_vcmpgeq_n_f16 (float16x8_t __a
, float16_t __b
)
17225 return __builtin_mve_vcmpgeq_n_fv8hf (__a
, __b
);
17228 __extension__
extern __inline mve_pred16_t
17229 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17230 __arm_vcmpgeq_f16 (float16x8_t __a
, float16x8_t __b
)
17232 return __builtin_mve_vcmpgeq_fv8hf (__a
, __b
);
17235 __extension__
extern __inline mve_pred16_t
17236 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17237 __arm_vcmpeqq_n_f16 (float16x8_t __a
, float16_t __b
)
17239 return __builtin_mve_vcmpeqq_n_fv8hf (__a
, __b
);
17242 __extension__
extern __inline mve_pred16_t
17243 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17244 __arm_vcmpeqq_f16 (float16x8_t __a
, float16x8_t __b
)
17246 return __builtin_mve_vcmpeqq_fv8hf (__a
, __b
);
17249 __extension__
extern __inline float16x8_t
17250 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17251 __arm_vsubq_f16 (float16x8_t __a
, float16x8_t __b
)
17253 return __builtin_mve_vsubq_fv8hf (__a
, __b
);
17256 __extension__
extern __inline float16x8_t
17257 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17258 __arm_vorrq_f16 (float16x8_t __a
, float16x8_t __b
)
17260 return __builtin_mve_vorrq_fv8hf (__a
, __b
);
17263 __extension__
extern __inline float16x8_t
17264 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17265 __arm_vornq_f16 (float16x8_t __a
, float16x8_t __b
)
17267 return __builtin_mve_vornq_fv8hf (__a
, __b
);
17270 __extension__
extern __inline float16x8_t
17271 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17272 __arm_vmulq_n_f16 (float16x8_t __a
, float16_t __b
)
17274 return __builtin_mve_vmulq_n_fv8hf (__a
, __b
);
17277 __extension__
extern __inline float16x8_t
17278 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17279 __arm_vmulq_f16 (float16x8_t __a
, float16x8_t __b
)
17281 return __builtin_mve_vmulq_fv8hf (__a
, __b
);
17284 __extension__
extern __inline float16_t
17285 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17286 __arm_vminnmvq_f16 (float16_t __a
, float16x8_t __b
)
17288 return __builtin_mve_vminnmvq_fv8hf (__a
, __b
);
17291 __extension__
extern __inline float16x8_t
17292 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17293 __arm_vminnmq_f16 (float16x8_t __a
, float16x8_t __b
)
17295 return __builtin_mve_vminnmq_fv8hf (__a
, __b
);
17298 __extension__
extern __inline float16_t
17299 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17300 __arm_vminnmavq_f16 (float16_t __a
, float16x8_t __b
)
17302 return __builtin_mve_vminnmavq_fv8hf (__a
, __b
);
17305 __extension__
extern __inline float16x8_t
17306 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17307 __arm_vminnmaq_f16 (float16x8_t __a
, float16x8_t __b
)
17309 return __builtin_mve_vminnmaq_fv8hf (__a
, __b
);
17312 __extension__
extern __inline float16_t
17313 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17314 __arm_vmaxnmvq_f16 (float16_t __a
, float16x8_t __b
)
17316 return __builtin_mve_vmaxnmvq_fv8hf (__a
, __b
);
17319 __extension__
extern __inline float16x8_t
17320 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17321 __arm_vmaxnmq_f16 (float16x8_t __a
, float16x8_t __b
)
17323 return __builtin_mve_vmaxnmq_fv8hf (__a
, __b
);
17326 __extension__
extern __inline float16_t
17327 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17328 __arm_vmaxnmavq_f16 (float16_t __a
, float16x8_t __b
)
17330 return __builtin_mve_vmaxnmavq_fv8hf (__a
, __b
);
17333 __extension__
extern __inline float16x8_t
17334 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17335 __arm_vmaxnmaq_f16 (float16x8_t __a
, float16x8_t __b
)
17337 return __builtin_mve_vmaxnmaq_fv8hf (__a
, __b
);
17340 __extension__
extern __inline float16x8_t
17341 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17342 __arm_veorq_f16 (float16x8_t __a
, float16x8_t __b
)
17344 return __builtin_mve_veorq_fv8hf (__a
, __b
);
17347 __extension__
extern __inline float16x8_t
17348 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17349 __arm_vcmulq_rot90_f16 (float16x8_t __a
, float16x8_t __b
)
17351 return __builtin_mve_vcmulq_rot90v8hf (__a
, __b
);
17354 __extension__
extern __inline float16x8_t
17355 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17356 __arm_vcmulq_rot270_f16 (float16x8_t __a
, float16x8_t __b
)
17358 return __builtin_mve_vcmulq_rot270v8hf (__a
, __b
);
17361 __extension__
extern __inline float16x8_t
17362 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17363 __arm_vcmulq_rot180_f16 (float16x8_t __a
, float16x8_t __b
)
17365 return __builtin_mve_vcmulq_rot180v8hf (__a
, __b
);
17368 __extension__
extern __inline float16x8_t
17369 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17370 __arm_vcmulq_f16 (float16x8_t __a
, float16x8_t __b
)
17372 return __builtin_mve_vcmulqv8hf (__a
, __b
);
17375 __extension__
extern __inline float16x8_t
17376 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17377 __arm_vcaddq_rot90_f16 (float16x8_t __a
, float16x8_t __b
)
17379 return __builtin_mve_vcaddq_rot90v8hf (__a
, __b
);
17382 __extension__
extern __inline float16x8_t
17383 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17384 __arm_vcaddq_rot270_f16 (float16x8_t __a
, float16x8_t __b
)
17386 return __builtin_mve_vcaddq_rot270v8hf (__a
, __b
);
17389 __extension__
extern __inline float16x8_t
17390 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17391 __arm_vbicq_f16 (float16x8_t __a
, float16x8_t __b
)
17393 return __builtin_mve_vbicq_fv8hf (__a
, __b
);
17396 __extension__
extern __inline float16x8_t
17397 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17398 __arm_vandq_f16 (float16x8_t __a
, float16x8_t __b
)
17400 return __builtin_mve_vandq_fv8hf (__a
, __b
);
17403 __extension__
extern __inline float16x8_t
17404 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17405 __arm_vaddq_n_f16 (float16x8_t __a
, float16_t __b
)
17407 return __builtin_mve_vaddq_n_fv8hf (__a
, __b
);
17410 __extension__
extern __inline float16x8_t
17411 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17412 __arm_vabdq_f16 (float16x8_t __a
, float16x8_t __b
)
17414 return __builtin_mve_vabdq_fv8hf (__a
, __b
);
17417 __extension__
extern __inline mve_pred16_t
17418 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17419 __arm_vcmpneq_n_f32 (float32x4_t __a
, float32_t __b
)
17421 return __builtin_mve_vcmpneq_n_fv4sf (__a
, __b
);
17424 __extension__
extern __inline mve_pred16_t
17425 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17426 __arm_vcmpneq_f32 (float32x4_t __a
, float32x4_t __b
)
17428 return __builtin_mve_vcmpneq_fv4sf (__a
, __b
);
17431 __extension__
extern __inline mve_pred16_t
17432 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17433 __arm_vcmpltq_n_f32 (float32x4_t __a
, float32_t __b
)
17435 return __builtin_mve_vcmpltq_n_fv4sf (__a
, __b
);
17438 __extension__
extern __inline mve_pred16_t
17439 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17440 __arm_vcmpltq_f32 (float32x4_t __a
, float32x4_t __b
)
17442 return __builtin_mve_vcmpltq_fv4sf (__a
, __b
);
17445 __extension__
extern __inline mve_pred16_t
17446 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17447 __arm_vcmpleq_n_f32 (float32x4_t __a
, float32_t __b
)
17449 return __builtin_mve_vcmpleq_n_fv4sf (__a
, __b
);
17452 __extension__
extern __inline mve_pred16_t
17453 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17454 __arm_vcmpleq_f32 (float32x4_t __a
, float32x4_t __b
)
17456 return __builtin_mve_vcmpleq_fv4sf (__a
, __b
);
17459 __extension__
extern __inline mve_pred16_t
17460 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17461 __arm_vcmpgtq_n_f32 (float32x4_t __a
, float32_t __b
)
17463 return __builtin_mve_vcmpgtq_n_fv4sf (__a
, __b
);
17466 __extension__
extern __inline mve_pred16_t
17467 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17468 __arm_vcmpgtq_f32 (float32x4_t __a
, float32x4_t __b
)
17470 return __builtin_mve_vcmpgtq_fv4sf (__a
, __b
);
17473 __extension__
extern __inline mve_pred16_t
17474 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17475 __arm_vcmpgeq_n_f32 (float32x4_t __a
, float32_t __b
)
17477 return __builtin_mve_vcmpgeq_n_fv4sf (__a
, __b
);
17480 __extension__
extern __inline mve_pred16_t
17481 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17482 __arm_vcmpgeq_f32 (float32x4_t __a
, float32x4_t __b
)
17484 return __builtin_mve_vcmpgeq_fv4sf (__a
, __b
);
17487 __extension__
extern __inline mve_pred16_t
17488 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17489 __arm_vcmpeqq_n_f32 (float32x4_t __a
, float32_t __b
)
17491 return __builtin_mve_vcmpeqq_n_fv4sf (__a
, __b
);
17494 __extension__
extern __inline mve_pred16_t
17495 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17496 __arm_vcmpeqq_f32 (float32x4_t __a
, float32x4_t __b
)
17498 return __builtin_mve_vcmpeqq_fv4sf (__a
, __b
);
17501 __extension__
extern __inline float32x4_t
17502 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17503 __arm_vsubq_f32 (float32x4_t __a
, float32x4_t __b
)
17505 return __builtin_mve_vsubq_fv4sf (__a
, __b
);
17508 __extension__
extern __inline float32x4_t
17509 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17510 __arm_vorrq_f32 (float32x4_t __a
, float32x4_t __b
)
17512 return __builtin_mve_vorrq_fv4sf (__a
, __b
);
17515 __extension__
extern __inline float32x4_t
17516 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17517 __arm_vornq_f32 (float32x4_t __a
, float32x4_t __b
)
17519 return __builtin_mve_vornq_fv4sf (__a
, __b
);
17522 __extension__
extern __inline float32x4_t
17523 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17524 __arm_vmulq_n_f32 (float32x4_t __a
, float32_t __b
)
17526 return __builtin_mve_vmulq_n_fv4sf (__a
, __b
);
17529 __extension__
extern __inline float32x4_t
17530 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17531 __arm_vmulq_f32 (float32x4_t __a
, float32x4_t __b
)
17533 return __builtin_mve_vmulq_fv4sf (__a
, __b
);
17536 __extension__
extern __inline float32_t
17537 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17538 __arm_vminnmvq_f32 (float32_t __a
, float32x4_t __b
)
17540 return __builtin_mve_vminnmvq_fv4sf (__a
, __b
);
17543 __extension__
extern __inline float32x4_t
17544 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17545 __arm_vminnmq_f32 (float32x4_t __a
, float32x4_t __b
)
17547 return __builtin_mve_vminnmq_fv4sf (__a
, __b
);
17550 __extension__
extern __inline float32_t
17551 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17552 __arm_vminnmavq_f32 (float32_t __a
, float32x4_t __b
)
17554 return __builtin_mve_vminnmavq_fv4sf (__a
, __b
);
17557 __extension__
extern __inline float32x4_t
17558 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17559 __arm_vminnmaq_f32 (float32x4_t __a
, float32x4_t __b
)
17561 return __builtin_mve_vminnmaq_fv4sf (__a
, __b
);
17564 __extension__
extern __inline float32_t
17565 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17566 __arm_vmaxnmvq_f32 (float32_t __a
, float32x4_t __b
)
17568 return __builtin_mve_vmaxnmvq_fv4sf (__a
, __b
);
17571 __extension__
extern __inline float32x4_t
17572 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17573 __arm_vmaxnmq_f32 (float32x4_t __a
, float32x4_t __b
)
17575 return __builtin_mve_vmaxnmq_fv4sf (__a
, __b
);
17578 __extension__
extern __inline float32_t
17579 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17580 __arm_vmaxnmavq_f32 (float32_t __a
, float32x4_t __b
)
17582 return __builtin_mve_vmaxnmavq_fv4sf (__a
, __b
);
17585 __extension__
extern __inline float32x4_t
17586 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17587 __arm_vmaxnmaq_f32 (float32x4_t __a
, float32x4_t __b
)
17589 return __builtin_mve_vmaxnmaq_fv4sf (__a
, __b
);
17592 __extension__
extern __inline float32x4_t
17593 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17594 __arm_veorq_f32 (float32x4_t __a
, float32x4_t __b
)
17596 return __builtin_mve_veorq_fv4sf (__a
, __b
);
17599 __extension__
extern __inline float32x4_t
17600 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17601 __arm_vcmulq_rot90_f32 (float32x4_t __a
, float32x4_t __b
)
17603 return __builtin_mve_vcmulq_rot90v4sf (__a
, __b
);
17606 __extension__
extern __inline float32x4_t
17607 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17608 __arm_vcmulq_rot270_f32 (float32x4_t __a
, float32x4_t __b
)
17610 return __builtin_mve_vcmulq_rot270v4sf (__a
, __b
);
17613 __extension__
extern __inline float32x4_t
17614 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17615 __arm_vcmulq_rot180_f32 (float32x4_t __a
, float32x4_t __b
)
17617 return __builtin_mve_vcmulq_rot180v4sf (__a
, __b
);
17620 __extension__
extern __inline float32x4_t
17621 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17622 __arm_vcmulq_f32 (float32x4_t __a
, float32x4_t __b
)
17624 return __builtin_mve_vcmulqv4sf (__a
, __b
);
17627 __extension__
extern __inline float32x4_t
17628 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17629 __arm_vcaddq_rot90_f32 (float32x4_t __a
, float32x4_t __b
)
17631 return __builtin_mve_vcaddq_rot90v4sf (__a
, __b
);
17634 __extension__
extern __inline float32x4_t
17635 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17636 __arm_vcaddq_rot270_f32 (float32x4_t __a
, float32x4_t __b
)
17638 return __builtin_mve_vcaddq_rot270v4sf (__a
, __b
);
17641 __extension__
extern __inline float32x4_t
17642 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17643 __arm_vbicq_f32 (float32x4_t __a
, float32x4_t __b
)
17645 return __builtin_mve_vbicq_fv4sf (__a
, __b
);
17648 __extension__
extern __inline float32x4_t
17649 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17650 __arm_vandq_f32 (float32x4_t __a
, float32x4_t __b
)
17652 return __builtin_mve_vandq_fv4sf (__a
, __b
);
17655 __extension__
extern __inline float32x4_t
17656 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17657 __arm_vaddq_n_f32 (float32x4_t __a
, float32_t __b
)
17659 return __builtin_mve_vaddq_n_fv4sf (__a
, __b
);
17662 __extension__
extern __inline float32x4_t
17663 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17664 __arm_vabdq_f32 (float32x4_t __a
, float32x4_t __b
)
17666 return __builtin_mve_vabdq_fv4sf (__a
, __b
);
17669 __extension__
extern __inline float16x8_t
17670 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17671 __arm_vcvttq_f16_f32 (float16x8_t __a
, float32x4_t __b
)
17673 return __builtin_mve_vcvttq_f16_f32v8hf (__a
, __b
);
17676 __extension__
extern __inline float16x8_t
17677 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17678 __arm_vcvtbq_f16_f32 (float16x8_t __a
, float32x4_t __b
)
17680 return __builtin_mve_vcvtbq_f16_f32v8hf (__a
, __b
);
17683 __extension__
extern __inline mve_pred16_t
17684 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17685 __arm_vcmpeqq_m_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
17687 return __builtin_mve_vcmpeqq_m_fv8hf (__a
, __b
, __p
);
17690 __extension__
extern __inline mve_pred16_t
17691 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17692 __arm_vcmpeqq_m_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
17694 return __builtin_mve_vcmpeqq_m_fv4sf (__a
, __b
, __p
);
17697 __extension__
extern __inline int16x8_t
17698 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17699 __arm_vcvtaq_m_s16_f16 (int16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17701 return __builtin_mve_vcvtaq_m_sv8hi (__inactive
, __a
, __p
);
17704 __extension__
extern __inline uint16x8_t
17705 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17706 __arm_vcvtaq_m_u16_f16 (uint16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17708 return __builtin_mve_vcvtaq_m_uv8hi (__inactive
, __a
, __p
);
17711 __extension__
extern __inline int32x4_t
17712 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17713 __arm_vcvtaq_m_s32_f32 (int32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
17715 return __builtin_mve_vcvtaq_m_sv4si (__inactive
, __a
, __p
);
17718 __extension__
extern __inline uint32x4_t
17719 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17720 __arm_vcvtaq_m_u32_f32 (uint32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
17722 return __builtin_mve_vcvtaq_m_uv4si (__inactive
, __a
, __p
);
17725 __extension__
extern __inline float16x8_t
17726 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17727 __arm_vcvtq_m_f16_s16 (float16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
17729 return __builtin_mve_vcvtq_m_to_f_sv8hf (__inactive
, __a
, __p
);
17732 __extension__
extern __inline float16x8_t
17733 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17734 __arm_vcvtq_m_f16_u16 (float16x8_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
17736 return __builtin_mve_vcvtq_m_to_f_uv8hf (__inactive
, __a
, __p
);
17739 __extension__
extern __inline float32x4_t
17740 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17741 __arm_vcvtq_m_f32_s32 (float32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
17743 return __builtin_mve_vcvtq_m_to_f_sv4sf (__inactive
, __a
, __p
);
17746 __extension__
extern __inline float32x4_t
17747 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17748 __arm_vcvtq_m_f32_u32 (float32x4_t __inactive
, uint32x4_t __a
, mve_pred16_t __p
)
17750 return __builtin_mve_vcvtq_m_to_f_uv4sf (__inactive
, __a
, __p
);
17754 __extension__
extern __inline float16x8_t
17755 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17756 __arm_vcvtbq_m_f16_f32 (float16x8_t __a
, float32x4_t __b
, mve_pred16_t __p
)
17758 return __builtin_mve_vcvtbq_m_f16_f32v8hf (__a
, __b
, __p
);
17761 __extension__
extern __inline float32x4_t
17762 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17763 __arm_vcvtbq_m_f32_f16 (float32x4_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17765 return __builtin_mve_vcvtbq_m_f32_f16v4sf (__inactive
, __a
, __p
);
17768 __extension__
extern __inline float16x8_t
17769 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17770 __arm_vcvttq_m_f16_f32 (float16x8_t __a
, float32x4_t __b
, mve_pred16_t __p
)
17772 return __builtin_mve_vcvttq_m_f16_f32v8hf (__a
, __b
, __p
);
17775 __extension__
extern __inline float32x4_t
17776 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17777 __arm_vcvttq_m_f32_f16 (float32x4_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17779 return __builtin_mve_vcvttq_m_f32_f16v4sf (__inactive
, __a
, __p
);
17782 __extension__
extern __inline float16x8_t
17783 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17784 __arm_vrev32q_m_f16 (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17786 return __builtin_mve_vrev32q_m_fv8hf (__inactive
, __a
, __p
);
17789 __extension__
extern __inline float16x8_t
17790 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17791 __arm_vcmlaq_f16 (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
)
17793 return __builtin_mve_vcmlaqv8hf (__a
, __b
, __c
);
17796 __extension__
extern __inline float16x8_t
17797 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17798 __arm_vcmlaq_rot180_f16 (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
)
17800 return __builtin_mve_vcmlaq_rot180v8hf (__a
, __b
, __c
);
17803 __extension__
extern __inline float16x8_t
17804 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17805 __arm_vcmlaq_rot270_f16 (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
)
17807 return __builtin_mve_vcmlaq_rot270v8hf (__a
, __b
, __c
);
17810 __extension__
extern __inline float16x8_t
17811 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17812 __arm_vcmlaq_rot90_f16 (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
)
17814 return __builtin_mve_vcmlaq_rot90v8hf (__a
, __b
, __c
);
17817 __extension__
extern __inline float16x8_t
17818 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17819 __arm_vfmaq_f16 (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
)
17821 return __builtin_mve_vfmaq_fv8hf (__a
, __b
, __c
);
17824 __extension__
extern __inline float16x8_t
17825 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17826 __arm_vfmaq_n_f16 (float16x8_t __a
, float16x8_t __b
, float16_t __c
)
17828 return __builtin_mve_vfmaq_n_fv8hf (__a
, __b
, __c
);
17831 __extension__
extern __inline float16x8_t
17832 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17833 __arm_vfmasq_n_f16 (float16x8_t __a
, float16x8_t __b
, float16_t __c
)
17835 return __builtin_mve_vfmasq_n_fv8hf (__a
, __b
, __c
);
17838 __extension__
extern __inline float16x8_t
17839 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17840 __arm_vfmsq_f16 (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
)
17842 return __builtin_mve_vfmsq_fv8hf (__a
, __b
, __c
);
17845 __extension__
extern __inline float16x8_t
17846 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17847 __arm_vabsq_m_f16 (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17849 return __builtin_mve_vabsq_m_fv8hf (__inactive
, __a
, __p
);
17852 __extension__
extern __inline int16x8_t
17853 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17854 __arm_vcvtmq_m_s16_f16 (int16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17856 return __builtin_mve_vcvtmq_m_sv8hi (__inactive
, __a
, __p
);
17859 __extension__
extern __inline int16x8_t
17860 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17861 __arm_vcvtnq_m_s16_f16 (int16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17863 return __builtin_mve_vcvtnq_m_sv8hi (__inactive
, __a
, __p
);
17866 __extension__
extern __inline int16x8_t
17867 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17868 __arm_vcvtpq_m_s16_f16 (int16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17870 return __builtin_mve_vcvtpq_m_sv8hi (__inactive
, __a
, __p
);
17873 __extension__
extern __inline int16x8_t
17874 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17875 __arm_vcvtq_m_s16_f16 (int16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17877 return __builtin_mve_vcvtq_m_from_f_sv8hi (__inactive
, __a
, __p
);
17880 __extension__
extern __inline float16x8_t
17881 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17882 __arm_vdupq_m_n_f16 (float16x8_t __inactive
, float16_t __a
, mve_pred16_t __p
)
17884 return __builtin_mve_vdupq_m_n_fv8hf (__inactive
, __a
, __p
);
17887 __extension__
extern __inline float16x8_t
17888 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17889 __arm_vmaxnmaq_m_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
17891 return __builtin_mve_vmaxnmaq_m_fv8hf (__a
, __b
, __p
);
17894 __extension__
extern __inline float16_t
17895 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17896 __arm_vmaxnmavq_p_f16 (float16_t __a
, float16x8_t __b
, mve_pred16_t __p
)
17898 return __builtin_mve_vmaxnmavq_p_fv8hf (__a
, __b
, __p
);
17901 __extension__
extern __inline float16_t
17902 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17903 __arm_vmaxnmvq_p_f16 (float16_t __a
, float16x8_t __b
, mve_pred16_t __p
)
17905 return __builtin_mve_vmaxnmvq_p_fv8hf (__a
, __b
, __p
);
17908 __extension__
extern __inline float16x8_t
17909 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17910 __arm_vminnmaq_m_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
17912 return __builtin_mve_vminnmaq_m_fv8hf (__a
, __b
, __p
);
17915 __extension__
extern __inline float16_t
17916 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17917 __arm_vminnmavq_p_f16 (float16_t __a
, float16x8_t __b
, mve_pred16_t __p
)
17919 return __builtin_mve_vminnmavq_p_fv8hf (__a
, __b
, __p
);
17922 __extension__
extern __inline float16_t
17923 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17924 __arm_vminnmvq_p_f16 (float16_t __a
, float16x8_t __b
, mve_pred16_t __p
)
17926 return __builtin_mve_vminnmvq_p_fv8hf (__a
, __b
, __p
);
17929 __extension__
extern __inline float16x8_t
17930 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17931 __arm_vnegq_m_f16 (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17933 return __builtin_mve_vnegq_m_fv8hf (__inactive
, __a
, __p
);
17936 __extension__
extern __inline float16x8_t
17937 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17938 __arm_vpselq_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
17940 return __builtin_mve_vpselq_fv8hf (__a
, __b
, __p
);
17943 __extension__
extern __inline float16x8_t
17944 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17945 __arm_vrev64q_m_f16 (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17947 return __builtin_mve_vrev64q_m_fv8hf (__inactive
, __a
, __p
);
17950 __extension__
extern __inline float16x8_t
17951 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17952 __arm_vrndaq_m_f16 (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17954 return __builtin_mve_vrndaq_m_fv8hf (__inactive
, __a
, __p
);
17957 __extension__
extern __inline float16x8_t
17958 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17959 __arm_vrndmq_m_f16 (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17961 return __builtin_mve_vrndmq_m_fv8hf (__inactive
, __a
, __p
);
17964 __extension__
extern __inline float16x8_t
17965 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17966 __arm_vrndnq_m_f16 (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17968 return __builtin_mve_vrndnq_m_fv8hf (__inactive
, __a
, __p
);
17971 __extension__
extern __inline float16x8_t
17972 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17973 __arm_vrndpq_m_f16 (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17975 return __builtin_mve_vrndpq_m_fv8hf (__inactive
, __a
, __p
);
17978 __extension__
extern __inline float16x8_t
17979 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17980 __arm_vrndq_m_f16 (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17982 return __builtin_mve_vrndq_m_fv8hf (__inactive
, __a
, __p
);
17985 __extension__
extern __inline float16x8_t
17986 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17987 __arm_vrndxq_m_f16 (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
17989 return __builtin_mve_vrndxq_m_fv8hf (__inactive
, __a
, __p
);
17992 __extension__
extern __inline mve_pred16_t
17993 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
17994 __arm_vcmpeqq_m_n_f16 (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
17996 return __builtin_mve_vcmpeqq_m_n_fv8hf (__a
, __b
, __p
);
17999 __extension__
extern __inline mve_pred16_t
18000 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18001 __arm_vcmpgeq_m_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
18003 return __builtin_mve_vcmpgeq_m_fv8hf (__a
, __b
, __p
);
18006 __extension__
extern __inline mve_pred16_t
18007 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18008 __arm_vcmpgeq_m_n_f16 (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
18010 return __builtin_mve_vcmpgeq_m_n_fv8hf (__a
, __b
, __p
);
18013 __extension__
extern __inline mve_pred16_t
18014 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18015 __arm_vcmpgtq_m_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
18017 return __builtin_mve_vcmpgtq_m_fv8hf (__a
, __b
, __p
);
18020 __extension__
extern __inline mve_pred16_t
18021 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18022 __arm_vcmpgtq_m_n_f16 (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
18024 return __builtin_mve_vcmpgtq_m_n_fv8hf (__a
, __b
, __p
);
18027 __extension__
extern __inline mve_pred16_t
18028 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18029 __arm_vcmpleq_m_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
18031 return __builtin_mve_vcmpleq_m_fv8hf (__a
, __b
, __p
);
18034 __extension__
extern __inline mve_pred16_t
18035 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18036 __arm_vcmpleq_m_n_f16 (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
18038 return __builtin_mve_vcmpleq_m_n_fv8hf (__a
, __b
, __p
);
18041 __extension__
extern __inline mve_pred16_t
18042 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18043 __arm_vcmpltq_m_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
18045 return __builtin_mve_vcmpltq_m_fv8hf (__a
, __b
, __p
);
18048 __extension__
extern __inline mve_pred16_t
18049 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18050 __arm_vcmpltq_m_n_f16 (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
18052 return __builtin_mve_vcmpltq_m_n_fv8hf (__a
, __b
, __p
);
18055 __extension__
extern __inline mve_pred16_t
18056 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18057 __arm_vcmpneq_m_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
18059 return __builtin_mve_vcmpneq_m_fv8hf (__a
, __b
, __p
);
18062 __extension__
extern __inline mve_pred16_t
18063 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18064 __arm_vcmpneq_m_n_f16 (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
18066 return __builtin_mve_vcmpneq_m_n_fv8hf (__a
, __b
, __p
);
18069 __extension__
extern __inline uint16x8_t
18070 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18071 __arm_vcvtmq_m_u16_f16 (uint16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
18073 return __builtin_mve_vcvtmq_m_uv8hi (__inactive
, __a
, __p
);
18076 __extension__
extern __inline uint16x8_t
18077 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18078 __arm_vcvtnq_m_u16_f16 (uint16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
18080 return __builtin_mve_vcvtnq_m_uv8hi (__inactive
, __a
, __p
);
18083 __extension__
extern __inline uint16x8_t
18084 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18085 __arm_vcvtpq_m_u16_f16 (uint16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
18087 return __builtin_mve_vcvtpq_m_uv8hi (__inactive
, __a
, __p
);
18090 __extension__
extern __inline uint16x8_t
18091 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18092 __arm_vcvtq_m_u16_f16 (uint16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
18094 return __builtin_mve_vcvtq_m_from_f_uv8hi (__inactive
, __a
, __p
);
18097 __extension__
extern __inline float32x4_t
18098 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18099 __arm_vcmlaq_f32 (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
)
18101 return __builtin_mve_vcmlaqv4sf (__a
, __b
, __c
);
18104 __extension__
extern __inline float32x4_t
18105 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18106 __arm_vcmlaq_rot180_f32 (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
)
18108 return __builtin_mve_vcmlaq_rot180v4sf (__a
, __b
, __c
);
18111 __extension__
extern __inline float32x4_t
18112 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18113 __arm_vcmlaq_rot270_f32 (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
)
18115 return __builtin_mve_vcmlaq_rot270v4sf (__a
, __b
, __c
);
18118 __extension__
extern __inline float32x4_t
18119 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18120 __arm_vcmlaq_rot90_f32 (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
)
18122 return __builtin_mve_vcmlaq_rot90v4sf (__a
, __b
, __c
);
18125 __extension__
extern __inline float32x4_t
18126 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18127 __arm_vfmaq_f32 (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
)
18129 return __builtin_mve_vfmaq_fv4sf (__a
, __b
, __c
);
18132 __extension__
extern __inline float32x4_t
18133 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18134 __arm_vfmaq_n_f32 (float32x4_t __a
, float32x4_t __b
, float32_t __c
)
18136 return __builtin_mve_vfmaq_n_fv4sf (__a
, __b
, __c
);
18139 __extension__
extern __inline float32x4_t
18140 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18141 __arm_vfmasq_n_f32 (float32x4_t __a
, float32x4_t __b
, float32_t __c
)
18143 return __builtin_mve_vfmasq_n_fv4sf (__a
, __b
, __c
);
18146 __extension__
extern __inline float32x4_t
18147 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18148 __arm_vfmsq_f32 (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
)
18150 return __builtin_mve_vfmsq_fv4sf (__a
, __b
, __c
);
18153 __extension__
extern __inline float32x4_t
18154 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18155 __arm_vabsq_m_f32 (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18157 return __builtin_mve_vabsq_m_fv4sf (__inactive
, __a
, __p
);
18160 __extension__
extern __inline int32x4_t
18161 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18162 __arm_vcvtmq_m_s32_f32 (int32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18164 return __builtin_mve_vcvtmq_m_sv4si (__inactive
, __a
, __p
);
18167 __extension__
extern __inline int32x4_t
18168 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18169 __arm_vcvtnq_m_s32_f32 (int32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18171 return __builtin_mve_vcvtnq_m_sv4si (__inactive
, __a
, __p
);
18174 __extension__
extern __inline int32x4_t
18175 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18176 __arm_vcvtpq_m_s32_f32 (int32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18178 return __builtin_mve_vcvtpq_m_sv4si (__inactive
, __a
, __p
);
18181 __extension__
extern __inline int32x4_t
18182 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18183 __arm_vcvtq_m_s32_f32 (int32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18185 return __builtin_mve_vcvtq_m_from_f_sv4si (__inactive
, __a
, __p
);
18188 __extension__
extern __inline float32x4_t
18189 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18190 __arm_vdupq_m_n_f32 (float32x4_t __inactive
, float32_t __a
, mve_pred16_t __p
)
18192 return __builtin_mve_vdupq_m_n_fv4sf (__inactive
, __a
, __p
);
18195 __extension__
extern __inline float32x4_t
18196 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18197 __arm_vmaxnmaq_m_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18199 return __builtin_mve_vmaxnmaq_m_fv4sf (__a
, __b
, __p
);
18202 __extension__
extern __inline float32_t
18203 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18204 __arm_vmaxnmavq_p_f32 (float32_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18206 return __builtin_mve_vmaxnmavq_p_fv4sf (__a
, __b
, __p
);
18209 __extension__
extern __inline float32_t
18210 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18211 __arm_vmaxnmvq_p_f32 (float32_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18213 return __builtin_mve_vmaxnmvq_p_fv4sf (__a
, __b
, __p
);
18216 __extension__
extern __inline float32x4_t
18217 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18218 __arm_vminnmaq_m_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18220 return __builtin_mve_vminnmaq_m_fv4sf (__a
, __b
, __p
);
18223 __extension__
extern __inline float32_t
18224 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18225 __arm_vminnmavq_p_f32 (float32_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18227 return __builtin_mve_vminnmavq_p_fv4sf (__a
, __b
, __p
);
18230 __extension__
extern __inline float32_t
18231 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18232 __arm_vminnmvq_p_f32 (float32_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18234 return __builtin_mve_vminnmvq_p_fv4sf (__a
, __b
, __p
);
18237 __extension__
extern __inline float32x4_t
18238 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18239 __arm_vnegq_m_f32 (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18241 return __builtin_mve_vnegq_m_fv4sf (__inactive
, __a
, __p
);
18244 __extension__
extern __inline float32x4_t
18245 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18246 __arm_vpselq_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18248 return __builtin_mve_vpselq_fv4sf (__a
, __b
, __p
);
18251 __extension__
extern __inline float32x4_t
18252 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18253 __arm_vrev64q_m_f32 (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18255 return __builtin_mve_vrev64q_m_fv4sf (__inactive
, __a
, __p
);
18258 __extension__
extern __inline float32x4_t
18259 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18260 __arm_vrndaq_m_f32 (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18262 return __builtin_mve_vrndaq_m_fv4sf (__inactive
, __a
, __p
);
18265 __extension__
extern __inline float32x4_t
18266 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18267 __arm_vrndmq_m_f32 (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18269 return __builtin_mve_vrndmq_m_fv4sf (__inactive
, __a
, __p
);
18272 __extension__
extern __inline float32x4_t
18273 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18274 __arm_vrndnq_m_f32 (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18276 return __builtin_mve_vrndnq_m_fv4sf (__inactive
, __a
, __p
);
18279 __extension__
extern __inline float32x4_t
18280 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18281 __arm_vrndpq_m_f32 (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18283 return __builtin_mve_vrndpq_m_fv4sf (__inactive
, __a
, __p
);
18286 __extension__
extern __inline float32x4_t
18287 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18288 __arm_vrndq_m_f32 (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18290 return __builtin_mve_vrndq_m_fv4sf (__inactive
, __a
, __p
);
18293 __extension__
extern __inline float32x4_t
18294 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18295 __arm_vrndxq_m_f32 (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18297 return __builtin_mve_vrndxq_m_fv4sf (__inactive
, __a
, __p
);
18300 __extension__
extern __inline mve_pred16_t
18301 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18302 __arm_vcmpeqq_m_n_f32 (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
18304 return __builtin_mve_vcmpeqq_m_n_fv4sf (__a
, __b
, __p
);
18307 __extension__
extern __inline mve_pred16_t
18308 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18309 __arm_vcmpgeq_m_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18311 return __builtin_mve_vcmpgeq_m_fv4sf (__a
, __b
, __p
);
18314 __extension__
extern __inline mve_pred16_t
18315 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18316 __arm_vcmpgeq_m_n_f32 (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
18318 return __builtin_mve_vcmpgeq_m_n_fv4sf (__a
, __b
, __p
);
18321 __extension__
extern __inline mve_pred16_t
18322 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18323 __arm_vcmpgtq_m_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18325 return __builtin_mve_vcmpgtq_m_fv4sf (__a
, __b
, __p
);
18328 __extension__
extern __inline mve_pred16_t
18329 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18330 __arm_vcmpgtq_m_n_f32 (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
18332 return __builtin_mve_vcmpgtq_m_n_fv4sf (__a
, __b
, __p
);
18335 __extension__
extern __inline mve_pred16_t
18336 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18337 __arm_vcmpleq_m_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18339 return __builtin_mve_vcmpleq_m_fv4sf (__a
, __b
, __p
);
18342 __extension__
extern __inline mve_pred16_t
18343 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18344 __arm_vcmpleq_m_n_f32 (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
18346 return __builtin_mve_vcmpleq_m_n_fv4sf (__a
, __b
, __p
);
18349 __extension__
extern __inline mve_pred16_t
18350 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18351 __arm_vcmpltq_m_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18353 return __builtin_mve_vcmpltq_m_fv4sf (__a
, __b
, __p
);
18356 __extension__
extern __inline mve_pred16_t
18357 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18358 __arm_vcmpltq_m_n_f32 (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
18360 return __builtin_mve_vcmpltq_m_n_fv4sf (__a
, __b
, __p
);
18363 __extension__
extern __inline mve_pred16_t
18364 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18365 __arm_vcmpneq_m_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18367 return __builtin_mve_vcmpneq_m_fv4sf (__a
, __b
, __p
);
18370 __extension__
extern __inline mve_pred16_t
18371 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18372 __arm_vcmpneq_m_n_f32 (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
18374 return __builtin_mve_vcmpneq_m_n_fv4sf (__a
, __b
, __p
);
18377 __extension__
extern __inline uint32x4_t
18378 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18379 __arm_vcvtmq_m_u32_f32 (uint32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18381 return __builtin_mve_vcvtmq_m_uv4si (__inactive
, __a
, __p
);
18384 __extension__
extern __inline uint32x4_t
18385 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18386 __arm_vcvtnq_m_u32_f32 (uint32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18388 return __builtin_mve_vcvtnq_m_uv4si (__inactive
, __a
, __p
);
18391 __extension__
extern __inline uint32x4_t
18392 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18393 __arm_vcvtpq_m_u32_f32 (uint32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18395 return __builtin_mve_vcvtpq_m_uv4si (__inactive
, __a
, __p
);
18398 __extension__
extern __inline uint32x4_t
18399 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18400 __arm_vcvtq_m_u32_f32 (uint32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
18402 return __builtin_mve_vcvtq_m_from_f_uv4si (__inactive
, __a
, __p
);
18405 __extension__
extern __inline float16x8_t
18406 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18407 __arm_vcvtq_m_n_f16_u16 (float16x8_t __inactive
, uint16x8_t __a
, const int __imm6
, mve_pred16_t __p
)
18409 return __builtin_mve_vcvtq_m_n_to_f_uv8hf (__inactive
, __a
, __imm6
, __p
);
18412 __extension__
extern __inline float16x8_t
18413 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18414 __arm_vcvtq_m_n_f16_s16 (float16x8_t __inactive
, int16x8_t __a
, const int __imm6
, mve_pred16_t __p
)
18416 return __builtin_mve_vcvtq_m_n_to_f_sv8hf (__inactive
, __a
, __imm6
, __p
);
18419 __extension__
extern __inline float32x4_t
18420 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18421 __arm_vcvtq_m_n_f32_u32 (float32x4_t __inactive
, uint32x4_t __a
, const int __imm6
, mve_pred16_t __p
)
18423 return __builtin_mve_vcvtq_m_n_to_f_uv4sf (__inactive
, __a
, __imm6
, __p
);
18426 __extension__
extern __inline float32x4_t
18427 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18428 __arm_vcvtq_m_n_f32_s32 (float32x4_t __inactive
, int32x4_t __a
, const int __imm6
, mve_pred16_t __p
)
18430 return __builtin_mve_vcvtq_m_n_to_f_sv4sf (__inactive
, __a
, __imm6
, __p
);
18433 __extension__
extern __inline float32x4_t
18434 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18435 __arm_vabdq_m_f32 (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18437 return __builtin_mve_vabdq_m_fv4sf (__inactive
, __a
, __b
, __p
);
18440 __extension__
extern __inline float16x8_t
18441 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18442 __arm_vabdq_m_f16 (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
18444 return __builtin_mve_vabdq_m_fv8hf (__inactive
, __a
, __b
, __p
);
18447 __extension__
extern __inline float32x4_t
18448 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18449 __arm_vaddq_m_f32 (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18451 return __builtin_mve_vaddq_m_fv4sf (__inactive
, __a
, __b
, __p
);
18454 __extension__
extern __inline float16x8_t
18455 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18456 __arm_vaddq_m_f16 (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
18458 return __builtin_mve_vaddq_m_fv8hf (__inactive
, __a
, __b
, __p
);
18461 __extension__
extern __inline float32x4_t
18462 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18463 __arm_vaddq_m_n_f32 (float32x4_t __inactive
, float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
18465 return __builtin_mve_vaddq_m_n_fv4sf (__inactive
, __a
, __b
, __p
);
18468 __extension__
extern __inline float16x8_t
18469 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18470 __arm_vaddq_m_n_f16 (float16x8_t __inactive
, float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
18472 return __builtin_mve_vaddq_m_n_fv8hf (__inactive
, __a
, __b
, __p
);
18475 __extension__
extern __inline float32x4_t
18476 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18477 __arm_vandq_m_f32 (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18479 return __builtin_mve_vandq_m_fv4sf (__inactive
, __a
, __b
, __p
);
18482 __extension__
extern __inline float16x8_t
18483 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18484 __arm_vandq_m_f16 (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
18486 return __builtin_mve_vandq_m_fv8hf (__inactive
, __a
, __b
, __p
);
18489 __extension__
extern __inline float32x4_t
18490 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18491 __arm_vbicq_m_f32 (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18493 return __builtin_mve_vbicq_m_fv4sf (__inactive
, __a
, __b
, __p
);
18496 __extension__
extern __inline float16x8_t
18497 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18498 __arm_vbicq_m_f16 (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
18500 return __builtin_mve_vbicq_m_fv8hf (__inactive
, __a
, __b
, __p
);
18503 __extension__
extern __inline float32x4_t
18504 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18505 __arm_vbrsrq_m_n_f32 (float32x4_t __inactive
, float32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
18507 return __builtin_mve_vbrsrq_m_n_fv4sf (__inactive
, __a
, __b
, __p
);
18510 __extension__
extern __inline float16x8_t
18511 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18512 __arm_vbrsrq_m_n_f16 (float16x8_t __inactive
, float16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
18514 return __builtin_mve_vbrsrq_m_n_fv8hf (__inactive
, __a
, __b
, __p
);
18517 __extension__
extern __inline float32x4_t
18518 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18519 __arm_vcaddq_rot270_m_f32 (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18521 return __builtin_mve_vcaddq_rot270_m_fv4sf (__inactive
, __a
, __b
, __p
);
18524 __extension__
extern __inline float16x8_t
18525 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18526 __arm_vcaddq_rot270_m_f16 (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
18528 return __builtin_mve_vcaddq_rot270_m_fv8hf (__inactive
, __a
, __b
, __p
);
18531 __extension__
extern __inline float32x4_t
18532 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18533 __arm_vcaddq_rot90_m_f32 (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
18535 return __builtin_mve_vcaddq_rot90_m_fv4sf (__inactive
, __a
, __b
, __p
);
18538 __extension__
extern __inline float16x8_t
18539 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
18540 __arm_vcaddq_rot90_m_f16 (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
18542 return __builtin_mve_vcaddq_rot90_m_fv8hf (__inactive
, __a
, __b
, __p
);
/* Predicated complex multiply-accumulate (VCMLA), rotation 0, f32.
   Accumulates into __a; lanes where __p is clear keep __a's value
   (note: unlike the "_m" ops with an __inactive operand, the
   accumulator itself is the merge source here).  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmlaq_m_f32 (float32x4_t __a, float32x4_t __b, float32x4_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vcmlaq_m_fv4sf (__a, __b, __c, __p);
}

/* f16 variant of the rotation-0 predicated VCMLA above.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmlaq_m_f16 (float16x8_t __a, float16x8_t __b, float16x8_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vcmlaq_m_fv8hf (__a, __b, __c, __p);
}

/* Predicated VCMLA, rotation 180, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmlaq_rot180_m_f32 (float32x4_t __a, float32x4_t __b, float32x4_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vcmlaq_rot180_m_fv4sf (__a, __b, __c, __p);
}

/* Predicated VCMLA, rotation 180, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmlaq_rot180_m_f16 (float16x8_t __a, float16x8_t __b, float16x8_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vcmlaq_rot180_m_fv8hf (__a, __b, __c, __p);
}

/* Predicated VCMLA, rotation 270, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmlaq_rot270_m_f32 (float32x4_t __a, float32x4_t __b, float32x4_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vcmlaq_rot270_m_fv4sf (__a, __b, __c, __p);
}

/* Predicated VCMLA, rotation 270, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmlaq_rot270_m_f16 (float16x8_t __a, float16x8_t __b, float16x8_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vcmlaq_rot270_m_fv8hf (__a, __b, __c, __p);
}

/* Predicated VCMLA, rotation 90, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmlaq_rot90_m_f32 (float32x4_t __a, float32x4_t __b, float32x4_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vcmlaq_rot90_m_fv4sf (__a, __b, __c, __p);
}

/* Predicated VCMLA, rotation 90, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmlaq_rot90_m_f16 (float16x8_t __a, float16x8_t __b, float16x8_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vcmlaq_rot90_m_fv8hf (__a, __b, __c, __p);
}
/* Merging-predicated complex multiply (VCMUL), rotation 0, f32:
   unselected lanes (per __p) come from __inactive.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmulq_m_f32 (float32x4_t __inactive, float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vcmulq_m_fv4sf (__inactive, __a, __b, __p);
}

/* f16 variant of the rotation-0 merging VCMUL above.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmulq_m_f16 (float16x8_t __inactive, float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vcmulq_m_fv8hf (__inactive, __a, __b, __p);
}

/* Merging VCMUL, rotation 180, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmulq_rot180_m_f32 (float32x4_t __inactive, float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vcmulq_rot180_m_fv4sf (__inactive, __a, __b, __p);
}

/* Merging VCMUL, rotation 180, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmulq_rot180_m_f16 (float16x8_t __inactive, float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vcmulq_rot180_m_fv8hf (__inactive, __a, __b, __p);
}

/* Merging VCMUL, rotation 270, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmulq_rot270_m_f32 (float32x4_t __inactive, float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vcmulq_rot270_m_fv4sf (__inactive, __a, __b, __p);
}

/* Merging VCMUL, rotation 270, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmulq_rot270_m_f16 (float16x8_t __inactive, float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vcmulq_rot270_m_fv8hf (__inactive, __a, __b, __p);
}

/* Merging VCMUL, rotation 90, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmulq_rot90_m_f32 (float32x4_t __inactive, float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vcmulq_rot90_m_fv4sf (__inactive, __a, __b, __p);
}

/* Merging VCMUL, rotation 90, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmulq_rot90_m_f16 (float16x8_t __inactive, float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vcmulq_rot90_m_fv8hf (__inactive, __a, __b, __p);
}
/* Merging-predicated fixed-point convert f32 -> s32 with __imm6
   fractional bits; unselected lanes come from __inactive.
   NOTE(review): __imm6's valid range is enforced by the builtin /
   ACLE (presumably 1..32 for 32-bit lanes) — confirm against ACLE.  */
__extension__ extern __inline int32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcvtq_m_n_s32_f32 (int32x4_t __inactive, float32x4_t __a, const int __imm6, mve_pred16_t __p)
{
  return __builtin_mve_vcvtq_m_n_from_f_sv4si (__inactive, __a, __imm6, __p);
}

/* f16 -> s16 variant of the predicated fixed-point convert above.  */
__extension__ extern __inline int16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcvtq_m_n_s16_f16 (int16x8_t __inactive, float16x8_t __a, const int __imm6, mve_pred16_t __p)
{
  return __builtin_mve_vcvtq_m_n_from_f_sv8hi (__inactive, __a, __imm6, __p);
}

/* f32 -> u32 unsigned variant.  */
__extension__ extern __inline uint32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcvtq_m_n_u32_f32 (uint32x4_t __inactive, float32x4_t __a, const int __imm6, mve_pred16_t __p)
{
  return __builtin_mve_vcvtq_m_n_from_f_uv4si (__inactive, __a, __imm6, __p);
}

/* f16 -> u16 unsigned variant.  */
__extension__ extern __inline uint16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcvtq_m_n_u16_f16 (uint16x8_t __inactive, float16x8_t __a, const int __imm6, mve_pred16_t __p)
{
  return __builtin_mve_vcvtq_m_n_from_f_uv8hi (__inactive, __a, __imm6, __p);
}
/* Merging-predicated bitwise XOR on f32 vectors (operates on the lane
   bit patterns); unselected lanes come from __inactive.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_veorq_m_f32 (float32x4_t __inactive, float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_veorq_m_fv4sf (__inactive, __a, __b, __p);
}

/* f16 variant of the merging bitwise XOR above.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_veorq_m_f16 (float16x8_t __inactive, float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_veorq_m_fv8hf (__inactive, __a, __b, __p);
}
/* Predicated fused multiply-add, f32: __a accumulates __b * __c in the
   lanes selected by __p (the accumulator __a is also the merge source).  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vfmaq_m_f32 (float32x4_t __a, float32x4_t __b, float32x4_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vfmaq_m_fv4sf (__a, __b, __c, __p);
}

/* f16 variant of the predicated FMA above.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vfmaq_m_f16 (float16x8_t __a, float16x8_t __b, float16x8_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vfmaq_m_fv8hf (__a, __b, __c, __p);
}

/* Predicated FMA with scalar multiplicand __c, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vfmaq_m_n_f32 (float32x4_t __a, float32x4_t __b, float32_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vfmaq_m_n_fv4sf (__a, __b, __c, __p);
}

/* Predicated FMA with scalar multiplicand __c, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vfmaq_m_n_f16 (float16x8_t __a, float16x8_t __b, float16_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vfmaq_m_n_fv8hf (__a, __b, __c, __p);
}

/* Predicated VFMAS (scalar addend __c), f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vfmasq_m_n_f32 (float32x4_t __a, float32x4_t __b, float32_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vfmasq_m_n_fv4sf (__a, __b, __c, __p);
}

/* Predicated VFMAS (scalar addend __c), f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vfmasq_m_n_f16 (float16x8_t __a, float16x8_t __b, float16_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vfmasq_m_n_fv8hf (__a, __b, __c, __p);
}
/* Predicated fused multiply-subtract, f32: per-lane under __p, with __a
   as both accumulator and merge source.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vfmsq_m_f32 (float32x4_t __a, float32x4_t __b, float32x4_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vfmsq_m_fv4sf (__a, __b, __c, __p);
}

/* f16 variant of the predicated FMS above.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vfmsq_m_f16 (float16x8_t __a, float16x8_t __b, float16x8_t __c, mve_pred16_t __p)
{
  return __builtin_mve_vfmsq_m_fv8hf (__a, __b, __c, __p);
}
/* Merging-predicated IEEE maxNum (VMAXNM), f32: unselected lanes come
   from __inactive.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmaxnmq_m_f32 (float32x4_t __inactive, float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vmaxnmq_m_fv4sf (__inactive, __a, __b, __p);
}

/* f16 variant of the merging VMAXNM above.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmaxnmq_m_f16 (float16x8_t __inactive, float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vmaxnmq_m_fv8hf (__inactive, __a, __b, __p);
}

/* Merging-predicated IEEE minNum (VMINNM), f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vminnmq_m_f32 (float32x4_t __inactive, float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vminnmq_m_fv4sf (__inactive, __a, __b, __p);
}

/* Merging-predicated IEEE minNum (VMINNM), f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vminnmq_m_f16 (float16x8_t __inactive, float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vminnmq_m_fv8hf (__inactive, __a, __b, __p);
}
/* Merging-predicated vector multiply, f32: unselected lanes come from
   __inactive.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmulq_m_f32 (float32x4_t __inactive, float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vmulq_m_fv4sf (__inactive, __a, __b, __p);
}

/* f16 variant of the merging multiply above.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmulq_m_f16 (float16x8_t __inactive, float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vmulq_m_fv8hf (__inactive, __a, __b, __p);
}

/* Merging multiply by scalar __b, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmulq_m_n_f32 (float32x4_t __inactive, float32x4_t __a, float32_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vmulq_m_n_fv4sf (__inactive, __a, __b, __p);
}

/* Merging multiply by scalar __b, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmulq_m_n_f16 (float16x8_t __inactive, float16x8_t __a, float16_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vmulq_m_n_fv8hf (__inactive, __a, __b, __p);
}
/* Merging-predicated bitwise OR-NOT (__a | ~__b on lane bit patterns),
   f32; unselected lanes come from __inactive.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vornq_m_f32 (float32x4_t __inactive, float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vornq_m_fv4sf (__inactive, __a, __b, __p);
}

/* f16 variant of the merging OR-NOT above.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vornq_m_f16 (float16x8_t __inactive, float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vornq_m_fv8hf (__inactive, __a, __b, __p);
}

/* Merging-predicated bitwise OR, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vorrq_m_f32 (float32x4_t __inactive, float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vorrq_m_fv4sf (__inactive, __a, __b, __p);
}

/* Merging-predicated bitwise OR, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vorrq_m_f16 (float16x8_t __inactive, float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vorrq_m_fv8hf (__inactive, __a, __b, __p);
}
/* Merging-predicated vector subtract (__a - __b), f32: unselected lanes
   come from __inactive.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vsubq_m_f32 (float32x4_t __inactive, float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vsubq_m_fv4sf (__inactive, __a, __b, __p);
}

/* f16 variant of the merging subtract above.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vsubq_m_f16 (float16x8_t __inactive, float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vsubq_m_fv8hf (__inactive, __a, __b, __p);
}

/* Merging subtract of scalar __b, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vsubq_m_n_f32 (float32x4_t __inactive, float32x4_t __a, float32_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vsubq_m_n_fv4sf (__inactive, __a, __b, __p);
}

/* Merging subtract of scalar __b, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vsubq_m_n_f16 (float16x8_t __inactive, float16x8_t __a, float16_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vsubq_m_n_fv8hf (__inactive, __a, __b, __p);
}
/* Contiguous load of 4 f32 from __base.  The cast to __builtin_neon_si *
   only adjusts the pointer type expected by the builtin; the loaded bits
   are the float data at __base.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vld1q_f32 (float32_t const * __base)
{
  return __builtin_mve_vld1q_fv4sf((__builtin_neon_si *) __base);
}

/* Contiguous load of 8 f16 from __base.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vld1q_f16 (float16_t const * __base)
{
  return __builtin_mve_vld1q_fv8hf((__builtin_neon_hi *) __base);
}

/* Word (32-bit) contiguous load of 4 f32, VLDRW form.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrwq_f32 (float32_t const * __base)
{
  return __builtin_mve_vldrwq_fv4sf((__builtin_neon_si *) __base);
}

/* Zeroing-predicated VLDRW: lanes not selected by __p are zeroed
   (ACLE "_z" semantics — TODO confirm).  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrwq_z_f32 (float32_t const * __base, mve_pred16_t __p)
{
  return __builtin_mve_vldrwq_z_fv4sf((__builtin_neon_si *) __base, __p);
}

/* Zeroing-predicated halfword load of 8 f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrhq_z_f16 (float16_t const * __base, mve_pred16_t __p)
{
  return __builtin_mve_vldrhq_z_fv8hf((__builtin_neon_hi *) __base, __p);
}

/* Halfword (16-bit) contiguous load of 8 f16, VLDRH form.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrhq_f16 (float16_t const * __base)
{
  return __builtin_mve_vldrhq_fv8hf((__builtin_neon_hi *) __base);
}
/* Gather-load 8 f16: each lane loads from __base plus the per-lane
   byte offset in __offset.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrhq_gather_offset_f16 (float16_t const * __base, uint16x8_t __offset)
{
  return __builtin_mve_vldrhq_gather_offset_fv8hf((__builtin_neon_hi *) __base, __offset);
}

/* Zeroing-predicated variant of the f16 gather-load above.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrhq_gather_offset_z_f16 (float16_t const * __base, uint16x8_t __offset, mve_pred16_t __p)
{
  return __builtin_mve_vldrhq_gather_offset_z_fv8hf((__builtin_neon_hi *) __base, __offset, __p);
}

/* Gather-load with shifted (element-scaled) offsets, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrhq_gather_shifted_offset_f16 (float16_t const * __base, uint16x8_t __offset)
{
  return __builtin_mve_vldrhq_gather_shifted_offset_fv8hf ((__builtin_neon_hi *) __base, __offset);
}

/* Zeroing-predicated shifted-offset gather-load, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrhq_gather_shifted_offset_z_f16 (float16_t const * __base, uint16x8_t __offset, mve_pred16_t __p)
{
  return __builtin_mve_vldrhq_gather_shifted_offset_z_fv8hf ((__builtin_neon_hi *) __base, __offset, __p);
}
/* Gather-load 4 f32 from per-lane base addresses in __addr, each plus
   the immediate byte offset __offset.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrwq_gather_base_f32 (uint32x4_t __addr, const int __offset)
{
  return __builtin_mve_vldrwq_gather_base_fv4sf (__addr, __offset);
}

/* Zeroing-predicated variant of the vector-base gather-load above.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrwq_gather_base_z_f32 (uint32x4_t __addr, const int __offset, mve_pred16_t __p)
{
  return __builtin_mve_vldrwq_gather_base_z_fv4sf (__addr, __offset, __p);
}

/* Gather-load 4 f32 from __base plus per-lane byte offsets.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrwq_gather_offset_f32 (float32_t const * __base, uint32x4_t __offset)
{
  return __builtin_mve_vldrwq_gather_offset_fv4sf((__builtin_neon_si *) __base, __offset);
}

/* Zeroing-predicated offset gather-load, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrwq_gather_offset_z_f32 (float32_t const * __base, uint32x4_t __offset, mve_pred16_t __p)
{
  return __builtin_mve_vldrwq_gather_offset_z_fv4sf((__builtin_neon_si *) __base, __offset, __p);
}

/* Gather-load with shifted (element-scaled) offsets, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrwq_gather_shifted_offset_f32 (float32_t const * __base, uint32x4_t __offset)
{
  return __builtin_mve_vldrwq_gather_shifted_offset_fv4sf ((__builtin_neon_si *) __base, __offset);
}

/* Zeroing-predicated shifted-offset gather-load, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrwq_gather_shifted_offset_z_f32 (float32_t const * __base, uint32x4_t __offset, mve_pred16_t __p)
{
  return __builtin_mve_vldrwq_gather_shifted_offset_z_fv4sf ((__builtin_neon_si *) __base, __offset, __p);
}
/* Predicated contiguous store of 4 f32 to __addr: only the lanes
   selected by __p are written.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrwq_p_f32 (float32_t * __addr, float32x4_t __value, mve_pred16_t __p)
{
  __builtin_mve_vstrwq_p_fv4sf ((__builtin_neon_si *) __addr, __value, __p);
}

/* Unpredicated word store of 4 f32, VSTRW form.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrwq_f32 (float32_t * __addr, float32x4_t __value)
{
  __builtin_mve_vstrwq_fv4sf ((__builtin_neon_si *) __addr, __value);
}

/* Contiguous store of 4 f32, VST1 form.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vst1q_f32 (float32_t * __addr, float32x4_t __value)
{
  __builtin_mve_vst1q_fv4sf ((__builtin_neon_si *) __addr, __value);
}

/* Contiguous store of 8 f16, VST1 form.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vst1q_f16 (float16_t * __addr, float16x8_t __value)
{
  __builtin_mve_vst1q_fv8hf ((__builtin_neon_hi *) __addr, __value);
}

/* Halfword store of 8 f16, VSTRH form.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrhq_f16 (float16_t * __addr, float16x8_t __value)
{
  __builtin_mve_vstrhq_fv8hf ((__builtin_neon_hi *) __addr, __value);
}

/* Predicated halfword store of 8 f16.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrhq_p_f16 (float16_t * __addr, float16x8_t __value, mve_pred16_t __p)
{
  __builtin_mve_vstrhq_p_fv8hf ((__builtin_neon_hi *) __addr, __value, __p);
}
/* Scatter-store 8 f16: each lane of __value is written to __base plus
   the per-lane byte offset in __offset.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrhq_scatter_offset_f16 (float16_t * __base, uint16x8_t __offset, float16x8_t __value)
{
  __builtin_mve_vstrhq_scatter_offset_fv8hf ((__builtin_neon_hi *) __base, __offset, __value);
}

/* Predicated variant of the f16 scatter-store above.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrhq_scatter_offset_p_f16 (float16_t * __base, uint16x8_t __offset, float16x8_t __value, mve_pred16_t __p)
{
  __builtin_mve_vstrhq_scatter_offset_p_fv8hf ((__builtin_neon_hi *) __base, __offset, __value, __p);
}

/* Scatter-store with shifted (element-scaled) offsets, f16.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrhq_scatter_shifted_offset_f16 (float16_t * __base, uint16x8_t __offset, float16x8_t __value)
{
  __builtin_mve_vstrhq_scatter_shifted_offset_fv8hf ((__builtin_neon_hi *) __base, __offset, __value);
}

/* Predicated shifted-offset scatter-store, f16.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrhq_scatter_shifted_offset_p_f16 (float16_t * __base, uint16x8_t __offset, float16x8_t __value, mve_pred16_t __p)
{
  __builtin_mve_vstrhq_scatter_shifted_offset_p_fv8hf ((__builtin_neon_hi *) __base, __offset, __value, __p);
}
/* Scatter-store 4 f32 to the per-lane base addresses in __addr, each
   plus the immediate byte offset __offset.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrwq_scatter_base_f32 (uint32x4_t __addr, const int __offset, float32x4_t __value)
{
  __builtin_mve_vstrwq_scatter_base_fv4sf (__addr, __offset, __value);
}

/* Predicated variant of the vector-base scatter-store above.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrwq_scatter_base_p_f32 (uint32x4_t __addr, const int __offset, float32x4_t __value, mve_pred16_t __p)
{
  __builtin_mve_vstrwq_scatter_base_p_fv4sf (__addr, __offset, __value, __p);
}

/* Scatter-store 4 f32 to __base plus per-lane byte offsets.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrwq_scatter_offset_f32 (float32_t * __base, uint32x4_t __offset, float32x4_t __value)
{
  __builtin_mve_vstrwq_scatter_offset_fv4sf ((__builtin_neon_si *) __base, __offset, __value);
}

/* Predicated offset scatter-store, f32.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrwq_scatter_offset_p_f32 (float32_t * __base, uint32x4_t __offset, float32x4_t __value, mve_pred16_t __p)
{
  __builtin_mve_vstrwq_scatter_offset_p_fv4sf ((__builtin_neon_si *) __base, __offset, __value, __p);
}

/* Scatter-store with shifted (element-scaled) offsets, f32.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrwq_scatter_shifted_offset_f32 (float32_t * __base, uint32x4_t __offset, float32x4_t __value)
{
  __builtin_mve_vstrwq_scatter_shifted_offset_fv4sf ((__builtin_neon_si *) __base, __offset, __value);
}

/* Predicated shifted-offset scatter-store, f32.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrwq_scatter_shifted_offset_p_f32 (float32_t * __base, uint32x4_t __offset, float32x4_t __value, mve_pred16_t __p)
{
  __builtin_mve_vstrwq_scatter_shifted_offset_p_fv4sf ((__builtin_neon_si *) __base, __offset, __value, __p);
}
/* Element-wise vector add, f16.  NOTE(review): the function body was
   lost in extraction; restored as the element-wise GNU vector sum,
   which matches the declared interface — confirm against upstream.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vaddq_f16 (float16x8_t __a, float16x8_t __b)
{
  return __a + __b;
}

/* Element-wise vector add, f32.  NOTE(review): body restored as above
   — confirm against upstream.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vaddq_f32 (float32x4_t __a, float32x4_t __b)
{
  return __a + __b;
}
/* Write-back gather-load, f32: loads via the pre-update base addresses
   in *__addr, then updates *__addr with the written-back bases.  The
   load uses the "nowb" builtin and the write-back is computed by a
   second builtin so both observe the original *__addr.
   NOTE(review): the `float32x4_t` declarator and `return result;`
   lines were lost in extraction and have been restored; the restored
   tokens follow from the declared float32x4_t return type — confirm
   against upstream.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrwq_gather_base_wb_f32 (uint32x4_t * __addr, const int __offset)
{
  float32x4_t
  result = __builtin_mve_vldrwq_gather_base_nowb_fv4sf (*__addr, __offset);
  *__addr = __builtin_mve_vldrwq_gather_base_wb_fv4sf (*__addr, __offset);
  return result;
}

/* Zeroing-predicated variant of the write-back gather-load above.
   NOTE(review): declarator and return restored as above — confirm.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vldrwq_gather_base_wb_z_f32 (uint32x4_t * __addr, const int __offset, mve_pred16_t __p)
{
  float32x4_t
  result = __builtin_mve_vldrwq_gather_base_nowb_z_fv4sf (*__addr, __offset, __p);
  *__addr = __builtin_mve_vldrwq_gather_base_wb_z_fv4sf (*__addr, __offset, __p);
  return result;
}
/* Write-back scatter-store, f32: stores __value via the bases in
   *__addr and writes the updated base vector back into *__addr (the
   builtin returns the new bases).  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrwq_scatter_base_wb_f32 (uint32x4_t * __addr, const int __offset, float32x4_t __value)
{
  *__addr = __builtin_mve_vstrwq_scatter_base_wb_fv4sf (*__addr, __offset, __value);
}

/* Predicated variant of the write-back scatter-store above.  */
__extension__ extern __inline void
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vstrwq_scatter_base_wb_p_f32 (uint32x4_t * __addr, const int __offset, float32x4_t __value, mve_pred16_t __p)
{
  *__addr = __builtin_mve_vstrwq_scatter_base_wb_p_fv4sf (*__addr, __offset, __value, __p);
}
/* "Don't-care"-predicated scalar broadcast, f16: implemented via the
   merging builtin with an uninitialized inactive vector, so unselected
   lanes hold unspecified values (ACLE "_x" semantics).  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vdupq_x_n_f16 (float16_t __a, mve_pred16_t __p)
{
  return __builtin_mve_vdupq_m_n_fv8hf (__arm_vuninitializedq_f16 (), __a, __p);
}

/* f32 variant of the "_x" scalar broadcast above.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vdupq_x_n_f32 (float32_t __a, mve_pred16_t __p)
{
  return __builtin_mve_vdupq_m_n_fv4sf (__arm_vuninitializedq_f32 (), __a, __p);
}
/* "_x" (don't-care) predicated IEEE minNum, f16: unselected lanes are
   unspecified (uninitialized inactive operand).  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vminnmq_x_f16 (float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vminnmq_m_fv8hf (__arm_vuninitializedq_f16 (), __a, __b, __p);
}

/* "_x" predicated IEEE minNum, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vminnmq_x_f32 (float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vminnmq_m_fv4sf (__arm_vuninitializedq_f32 (), __a, __b, __p);
}

/* "_x" predicated IEEE maxNum, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmaxnmq_x_f16 (float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vmaxnmq_m_fv8hf (__arm_vuninitializedq_f16 (), __a, __b, __p);
}

/* "_x" predicated IEEE maxNum, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmaxnmq_x_f32 (float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vmaxnmq_m_fv4sf (__arm_vuninitializedq_f32 (), __a, __b, __p);
}

/* "_x" predicated absolute difference, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vabdq_x_f16 (float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vabdq_m_fv8hf (__arm_vuninitializedq_f16 (), __a, __b, __p);
}

/* "_x" predicated absolute difference, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vabdq_x_f32 (float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vabdq_m_fv4sf (__arm_vuninitializedq_f32 (), __a, __b, __p);
}
/* "_x" predicated absolute value, f16: unselected lanes unspecified.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vabsq_x_f16 (float16x8_t __a, mve_pred16_t __p)
{
  return __builtin_mve_vabsq_m_fv8hf (__arm_vuninitializedq_f16 (), __a, __p);
}

/* "_x" predicated absolute value, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vabsq_x_f32 (float32x4_t __a, mve_pred16_t __p)
{
  return __builtin_mve_vabsq_m_fv4sf (__arm_vuninitializedq_f32 (), __a, __p);
}
/* "_x" predicated vector add, f16: unselected lanes unspecified.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vaddq_x_f16 (float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vaddq_m_fv8hf (__arm_vuninitializedq_f16 (), __a, __b, __p);
}

/* "_x" predicated vector add, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vaddq_x_f32 (float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vaddq_m_fv4sf (__arm_vuninitializedq_f32 (), __a, __b, __p);
}

/* "_x" predicated add of scalar __b, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vaddq_x_n_f16 (float16x8_t __a, float16_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vaddq_m_n_fv8hf (__arm_vuninitializedq_f16 (), __a, __b, __p);
}

/* "_x" predicated add of scalar __b, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vaddq_x_n_f32 (float32x4_t __a, float32_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vaddq_m_n_fv4sf (__arm_vuninitializedq_f32 (), __a, __b, __p);
}
/* "_x" predicated negation, f16: unselected lanes unspecified.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vnegq_x_f16 (float16x8_t __a, mve_pred16_t __p)
{
  return __builtin_mve_vnegq_m_fv8hf (__arm_vuninitializedq_f16 (), __a, __p);
}

/* "_x" predicated negation, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vnegq_x_f32 (float32x4_t __a, mve_pred16_t __p)
{
  return __builtin_mve_vnegq_m_fv4sf (__arm_vuninitializedq_f32 (), __a, __p);
}
/* "_x" predicated vector multiply, f16: unselected lanes unspecified.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmulq_x_f16 (float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vmulq_m_fv8hf (__arm_vuninitializedq_f16 (), __a, __b, __p);
}

/* "_x" predicated vector multiply, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmulq_x_f32 (float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vmulq_m_fv4sf (__arm_vuninitializedq_f32 (), __a, __b, __p);
}

/* "_x" predicated multiply by scalar __b, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmulq_x_n_f16 (float16x8_t __a, float16_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vmulq_m_n_fv8hf (__arm_vuninitializedq_f16 (), __a, __b, __p);
}

/* "_x" predicated multiply by scalar __b, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmulq_x_n_f32 (float32x4_t __a, float32_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vmulq_m_n_fv4sf (__arm_vuninitializedq_f32 (), __a, __b, __p);
}
/* "_x" predicated vector subtract, f16: unselected lanes unspecified.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vsubq_x_f16 (float16x8_t __a, float16x8_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vsubq_m_fv8hf (__arm_vuninitializedq_f16 (), __a, __b, __p);
}

/* "_x" predicated vector subtract, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vsubq_x_f32 (float32x4_t __a, float32x4_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vsubq_m_fv4sf (__arm_vuninitializedq_f32 (), __a, __b, __p);
}

/* "_x" predicated subtract of scalar __b, f16.  */
__extension__ extern __inline float16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vsubq_x_n_f16 (float16x8_t __a, float16_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vsubq_m_n_fv8hf (__arm_vuninitializedq_f16 (), __a, __b, __p);
}

/* "_x" predicated subtract of scalar __b, f32.  */
__extension__ extern __inline float32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vsubq_x_n_f32 (float32x4_t __a, float32_t __b, mve_pred16_t __p)
{
  return __builtin_mve_vsubq_m_n_fv4sf (__arm_vuninitializedq_f32 (), __a, __b, __p);
}
19307 __extension__
extern __inline float16x8_t
19308 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19309 __arm_vcaddq_rot90_x_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
19311 return __builtin_mve_vcaddq_rot90_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __b
, __p
);
19314 __extension__
extern __inline float32x4_t
19315 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19316 __arm_vcaddq_rot90_x_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
19318 return __builtin_mve_vcaddq_rot90_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __b
, __p
);
19321 __extension__
extern __inline float16x8_t
19322 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19323 __arm_vcaddq_rot270_x_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
19325 return __builtin_mve_vcaddq_rot270_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __b
, __p
);
19328 __extension__
extern __inline float32x4_t
19329 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19330 __arm_vcaddq_rot270_x_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
19332 return __builtin_mve_vcaddq_rot270_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __b
, __p
);
19335 __extension__
extern __inline float16x8_t
19336 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19337 __arm_vcmulq_x_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
19339 return __builtin_mve_vcmulq_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __b
, __p
);
19342 __extension__
extern __inline float32x4_t
19343 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19344 __arm_vcmulq_x_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
19346 return __builtin_mve_vcmulq_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __b
, __p
);
19349 __extension__
extern __inline float16x8_t
19350 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19351 __arm_vcmulq_rot90_x_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
19353 return __builtin_mve_vcmulq_rot90_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __b
, __p
);
19356 __extension__
extern __inline float32x4_t
19357 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19358 __arm_vcmulq_rot90_x_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
19360 return __builtin_mve_vcmulq_rot90_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __b
, __p
);
19363 __extension__
extern __inline float16x8_t
19364 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19365 __arm_vcmulq_rot180_x_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
19367 return __builtin_mve_vcmulq_rot180_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __b
, __p
);
19370 __extension__
extern __inline float32x4_t
19371 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19372 __arm_vcmulq_rot180_x_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
19374 return __builtin_mve_vcmulq_rot180_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __b
, __p
);
19377 __extension__
extern __inline float16x8_t
19378 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19379 __arm_vcmulq_rot270_x_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
19381 return __builtin_mve_vcmulq_rot270_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __b
, __p
);
19384 __extension__
extern __inline float32x4_t
19385 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19386 __arm_vcmulq_rot270_x_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
19388 return __builtin_mve_vcmulq_rot270_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __b
, __p
);
19391 __extension__
extern __inline int16x8_t
19392 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19393 __arm_vcvtaq_x_s16_f16 (float16x8_t __a
, mve_pred16_t __p
)
19395 return __builtin_mve_vcvtaq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
19398 __extension__
extern __inline int32x4_t
19399 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19400 __arm_vcvtaq_x_s32_f32 (float32x4_t __a
, mve_pred16_t __p
)
19402 return __builtin_mve_vcvtaq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __p
);
19405 __extension__
extern __inline uint16x8_t
19406 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19407 __arm_vcvtaq_x_u16_f16 (float16x8_t __a
, mve_pred16_t __p
)
19409 return __builtin_mve_vcvtaq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __p
);
19412 __extension__
extern __inline uint32x4_t
19413 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19414 __arm_vcvtaq_x_u32_f32 (float32x4_t __a
, mve_pred16_t __p
)
19416 return __builtin_mve_vcvtaq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __p
);
19419 __extension__
extern __inline int16x8_t
19420 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19421 __arm_vcvtnq_x_s16_f16 (float16x8_t __a
, mve_pred16_t __p
)
19423 return __builtin_mve_vcvtnq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
19426 __extension__
extern __inline int32x4_t
19427 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19428 __arm_vcvtnq_x_s32_f32 (float32x4_t __a
, mve_pred16_t __p
)
19430 return __builtin_mve_vcvtnq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __p
);
19433 __extension__
extern __inline uint16x8_t
19434 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19435 __arm_vcvtnq_x_u16_f16 (float16x8_t __a
, mve_pred16_t __p
)
19437 return __builtin_mve_vcvtnq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __p
);
19440 __extension__
extern __inline uint32x4_t
19441 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19442 __arm_vcvtnq_x_u32_f32 (float32x4_t __a
, mve_pred16_t __p
)
19444 return __builtin_mve_vcvtnq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __p
);
19447 __extension__
extern __inline int16x8_t
19448 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19449 __arm_vcvtpq_x_s16_f16 (float16x8_t __a
, mve_pred16_t __p
)
19451 return __builtin_mve_vcvtpq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
19454 __extension__
extern __inline int32x4_t
19455 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19456 __arm_vcvtpq_x_s32_f32 (float32x4_t __a
, mve_pred16_t __p
)
19458 return __builtin_mve_vcvtpq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __p
);
19461 __extension__
extern __inline uint16x8_t
19462 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19463 __arm_vcvtpq_x_u16_f16 (float16x8_t __a
, mve_pred16_t __p
)
19465 return __builtin_mve_vcvtpq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __p
);
19468 __extension__
extern __inline uint32x4_t
19469 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19470 __arm_vcvtpq_x_u32_f32 (float32x4_t __a
, mve_pred16_t __p
)
19472 return __builtin_mve_vcvtpq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __p
);
19475 __extension__
extern __inline int16x8_t
19476 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19477 __arm_vcvtmq_x_s16_f16 (float16x8_t __a
, mve_pred16_t __p
)
19479 return __builtin_mve_vcvtmq_m_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
19482 __extension__
extern __inline int32x4_t
19483 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19484 __arm_vcvtmq_x_s32_f32 (float32x4_t __a
, mve_pred16_t __p
)
19486 return __builtin_mve_vcvtmq_m_sv4si (__arm_vuninitializedq_s32 (), __a
, __p
);
19489 __extension__
extern __inline uint16x8_t
19490 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19491 __arm_vcvtmq_x_u16_f16 (float16x8_t __a
, mve_pred16_t __p
)
19493 return __builtin_mve_vcvtmq_m_uv8hi (__arm_vuninitializedq_u16 (), __a
, __p
);
19496 __extension__
extern __inline uint32x4_t
19497 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19498 __arm_vcvtmq_x_u32_f32 (float32x4_t __a
, mve_pred16_t __p
)
19500 return __builtin_mve_vcvtmq_m_uv4si (__arm_vuninitializedq_u32 (), __a
, __p
);
19503 __extension__
extern __inline float32x4_t
19504 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19505 __arm_vcvtbq_x_f32_f16 (float16x8_t __a
, mve_pred16_t __p
)
19507 return __builtin_mve_vcvtbq_m_f32_f16v4sf (__arm_vuninitializedq_f32 (), __a
, __p
);
19510 __extension__
extern __inline float32x4_t
19511 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19512 __arm_vcvttq_x_f32_f16 (float16x8_t __a
, mve_pred16_t __p
)
19514 return __builtin_mve_vcvttq_m_f32_f16v4sf (__arm_vuninitializedq_f32 (), __a
, __p
);
19517 __extension__
extern __inline float16x8_t
19518 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19519 __arm_vcvtq_x_f16_u16 (uint16x8_t __a
, mve_pred16_t __p
)
19521 return __builtin_mve_vcvtq_m_to_f_uv8hf (__arm_vuninitializedq_f16 (), __a
, __p
);
19524 __extension__
extern __inline float16x8_t
19525 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19526 __arm_vcvtq_x_f16_s16 (int16x8_t __a
, mve_pred16_t __p
)
19528 return __builtin_mve_vcvtq_m_to_f_sv8hf (__arm_vuninitializedq_f16 (), __a
, __p
);
19531 __extension__
extern __inline float32x4_t
19532 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19533 __arm_vcvtq_x_f32_s32 (int32x4_t __a
, mve_pred16_t __p
)
19535 return __builtin_mve_vcvtq_m_to_f_sv4sf (__arm_vuninitializedq_f32 (), __a
, __p
);
19538 __extension__
extern __inline float32x4_t
19539 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19540 __arm_vcvtq_x_f32_u32 (uint32x4_t __a
, mve_pred16_t __p
)
19542 return __builtin_mve_vcvtq_m_to_f_uv4sf (__arm_vuninitializedq_f32 (), __a
, __p
);
19545 __extension__
extern __inline float16x8_t
19546 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19547 __arm_vcvtq_x_n_f16_s16 (int16x8_t __a
, const int __imm6
, mve_pred16_t __p
)
19549 return __builtin_mve_vcvtq_m_n_to_f_sv8hf (__arm_vuninitializedq_f16 (), __a
, __imm6
, __p
);
19552 __extension__
extern __inline float16x8_t
19553 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19554 __arm_vcvtq_x_n_f16_u16 (uint16x8_t __a
, const int __imm6
, mve_pred16_t __p
)
19556 return __builtin_mve_vcvtq_m_n_to_f_uv8hf (__arm_vuninitializedq_f16 (), __a
, __imm6
, __p
);
19559 __extension__
extern __inline float32x4_t
19560 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19561 __arm_vcvtq_x_n_f32_s32 (int32x4_t __a
, const int __imm6
, mve_pred16_t __p
)
19563 return __builtin_mve_vcvtq_m_n_to_f_sv4sf (__arm_vuninitializedq_f32 (), __a
, __imm6
, __p
);
19566 __extension__
extern __inline float32x4_t
19567 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19568 __arm_vcvtq_x_n_f32_u32 (uint32x4_t __a
, const int __imm6
, mve_pred16_t __p
)
19570 return __builtin_mve_vcvtq_m_n_to_f_uv4sf (__arm_vuninitializedq_f32 (), __a
, __imm6
, __p
);
19573 __extension__
extern __inline int16x8_t
19574 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19575 __arm_vcvtq_x_s16_f16 (float16x8_t __a
, mve_pred16_t __p
)
19577 return __builtin_mve_vcvtq_m_from_f_sv8hi (__arm_vuninitializedq_s16 (), __a
, __p
);
19580 __extension__
extern __inline int32x4_t
19581 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19582 __arm_vcvtq_x_s32_f32 (float32x4_t __a
, mve_pred16_t __p
)
19584 return __builtin_mve_vcvtq_m_from_f_sv4si (__arm_vuninitializedq_s32 (), __a
, __p
);
19587 __extension__
extern __inline uint16x8_t
19588 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19589 __arm_vcvtq_x_u16_f16 (float16x8_t __a
, mve_pred16_t __p
)
19591 return __builtin_mve_vcvtq_m_from_f_uv8hi (__arm_vuninitializedq_u16 (), __a
, __p
);
19594 __extension__
extern __inline uint32x4_t
19595 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19596 __arm_vcvtq_x_u32_f32 (float32x4_t __a
, mve_pred16_t __p
)
19598 return __builtin_mve_vcvtq_m_from_f_uv4si (__arm_vuninitializedq_u32 (), __a
, __p
);
19601 __extension__
extern __inline int16x8_t
19602 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19603 __arm_vcvtq_x_n_s16_f16 (float16x8_t __a
, const int __imm6
, mve_pred16_t __p
)
19605 return __builtin_mve_vcvtq_m_n_from_f_sv8hi (__arm_vuninitializedq_s16 (), __a
, __imm6
, __p
);
19608 __extension__
extern __inline int32x4_t
19609 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19610 __arm_vcvtq_x_n_s32_f32 (float32x4_t __a
, const int __imm6
, mve_pred16_t __p
)
19612 return __builtin_mve_vcvtq_m_n_from_f_sv4si (__arm_vuninitializedq_s32 (), __a
, __imm6
, __p
);
19615 __extension__
extern __inline uint16x8_t
19616 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19617 __arm_vcvtq_x_n_u16_f16 (float16x8_t __a
, const int __imm6
, mve_pred16_t __p
)
19619 return __builtin_mve_vcvtq_m_n_from_f_uv8hi (__arm_vuninitializedq_u16 (), __a
, __imm6
, __p
);
19622 __extension__
extern __inline uint32x4_t
19623 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19624 __arm_vcvtq_x_n_u32_f32 (float32x4_t __a
, const int __imm6
, mve_pred16_t __p
)
19626 return __builtin_mve_vcvtq_m_n_from_f_uv4si (__arm_vuninitializedq_u32 (), __a
, __imm6
, __p
);
19629 __extension__
extern __inline float16x8_t
19630 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19631 __arm_vrndq_x_f16 (float16x8_t __a
, mve_pred16_t __p
)
19633 return __builtin_mve_vrndq_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __p
);
19636 __extension__
extern __inline float32x4_t
19637 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19638 __arm_vrndq_x_f32 (float32x4_t __a
, mve_pred16_t __p
)
19640 return __builtin_mve_vrndq_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __p
);
19643 __extension__
extern __inline float16x8_t
19644 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19645 __arm_vrndnq_x_f16 (float16x8_t __a
, mve_pred16_t __p
)
19647 return __builtin_mve_vrndnq_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __p
);
19650 __extension__
extern __inline float32x4_t
19651 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19652 __arm_vrndnq_x_f32 (float32x4_t __a
, mve_pred16_t __p
)
19654 return __builtin_mve_vrndnq_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __p
);
19657 __extension__
extern __inline float16x8_t
19658 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19659 __arm_vrndmq_x_f16 (float16x8_t __a
, mve_pred16_t __p
)
19661 return __builtin_mve_vrndmq_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __p
);
19664 __extension__
extern __inline float32x4_t
19665 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19666 __arm_vrndmq_x_f32 (float32x4_t __a
, mve_pred16_t __p
)
19668 return __builtin_mve_vrndmq_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __p
);
19671 __extension__
extern __inline float16x8_t
19672 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19673 __arm_vrndpq_x_f16 (float16x8_t __a
, mve_pred16_t __p
)
19675 return __builtin_mve_vrndpq_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __p
);
19678 __extension__
extern __inline float32x4_t
19679 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19680 __arm_vrndpq_x_f32 (float32x4_t __a
, mve_pred16_t __p
)
19682 return __builtin_mve_vrndpq_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __p
);
19685 __extension__
extern __inline float16x8_t
19686 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19687 __arm_vrndaq_x_f16 (float16x8_t __a
, mve_pred16_t __p
)
19689 return __builtin_mve_vrndaq_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __p
);
19692 __extension__
extern __inline float32x4_t
19693 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19694 __arm_vrndaq_x_f32 (float32x4_t __a
, mve_pred16_t __p
)
19696 return __builtin_mve_vrndaq_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __p
);
19699 __extension__
extern __inline float16x8_t
19700 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19701 __arm_vrndxq_x_f16 (float16x8_t __a
, mve_pred16_t __p
)
19703 return __builtin_mve_vrndxq_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __p
);
19706 __extension__
extern __inline float32x4_t
19707 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19708 __arm_vrndxq_x_f32 (float32x4_t __a
, mve_pred16_t __p
)
19710 return __builtin_mve_vrndxq_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __p
);
19713 __extension__
extern __inline float16x8_t
19714 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19715 __arm_vandq_x_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
19717 return __builtin_mve_vandq_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __b
, __p
);
19720 __extension__
extern __inline float32x4_t
19721 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19722 __arm_vandq_x_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
19724 return __builtin_mve_vandq_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __b
, __p
);
19727 __extension__
extern __inline float16x8_t
19728 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19729 __arm_vbicq_x_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
19731 return __builtin_mve_vbicq_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __b
, __p
);
19734 __extension__
extern __inline float32x4_t
19735 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19736 __arm_vbicq_x_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
19738 return __builtin_mve_vbicq_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __b
, __p
);
19741 __extension__
extern __inline float16x8_t
19742 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19743 __arm_vbrsrq_x_n_f16 (float16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
19745 return __builtin_mve_vbrsrq_m_n_fv8hf (__arm_vuninitializedq_f16 (), __a
, __b
, __p
);
19748 __extension__
extern __inline float32x4_t
19749 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19750 __arm_vbrsrq_x_n_f32 (float32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
19752 return __builtin_mve_vbrsrq_m_n_fv4sf (__arm_vuninitializedq_f32 (), __a
, __b
, __p
);
19755 __extension__
extern __inline float16x8_t
19756 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19757 __arm_veorq_x_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
19759 return __builtin_mve_veorq_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __b
, __p
);
19762 __extension__
extern __inline float32x4_t
19763 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19764 __arm_veorq_x_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
19766 return __builtin_mve_veorq_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __b
, __p
);
19769 __extension__
extern __inline float16x8_t
19770 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19771 __arm_vornq_x_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
19773 return __builtin_mve_vornq_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __b
, __p
);
19776 __extension__
extern __inline float32x4_t
19777 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19778 __arm_vornq_x_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
19780 return __builtin_mve_vornq_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __b
, __p
);
19783 __extension__
extern __inline float16x8_t
19784 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19785 __arm_vorrq_x_f16 (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
19787 return __builtin_mve_vorrq_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __b
, __p
);
19790 __extension__
extern __inline float32x4_t
19791 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19792 __arm_vorrq_x_f32 (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
19794 return __builtin_mve_vorrq_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __b
, __p
);
19797 __extension__
extern __inline float16x8_t
19798 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19799 __arm_vrev32q_x_f16 (float16x8_t __a
, mve_pred16_t __p
)
19801 return __builtin_mve_vrev32q_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __p
);
19804 __extension__
extern __inline float16x8_t
19805 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19806 __arm_vrev64q_x_f16 (float16x8_t __a
, mve_pred16_t __p
)
19808 return __builtin_mve_vrev64q_m_fv8hf (__arm_vuninitializedq_f16 (), __a
, __p
);
19811 __extension__
extern __inline float32x4_t
19812 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19813 __arm_vrev64q_x_f32 (float32x4_t __a
, mve_pred16_t __p
)
19815 return __builtin_mve_vrev64q_m_fv4sf (__arm_vuninitializedq_f32 (), __a
, __p
);
19818 __extension__
extern __inline float16x8x4_t
19819 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19820 __arm_vld4q_f16 (float16_t
const * __addr
)
19822 union { float16x8x4_t __i
; __builtin_neon_xi __o
; } __rv
;
19823 __rv
.__o
= __builtin_mve_vld4qv8hf (__addr
);
19827 __extension__
extern __inline float16x8x2_t
19828 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19829 __arm_vld2q_f16 (float16_t
const * __addr
)
19831 union { float16x8x2_t __i
; __builtin_neon_oi __o
; } __rv
;
19832 __rv
.__o
= __builtin_mve_vld2qv8hf (__addr
);
19836 __extension__
extern __inline float16x8_t
19837 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19838 __arm_vld1q_z_f16 (float16_t
const *__base
, mve_pred16_t __p
)
19840 return vldrhq_z_f16 (__base
, __p
);
19843 __extension__
extern __inline
void
19844 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19845 __arm_vst2q_f16 (float16_t
* __addr
, float16x8x2_t __value
)
19847 union { float16x8x2_t __i
; __builtin_neon_oi __o
; } __rv
;
19848 __rv
.__i
= __value
;
19849 __builtin_mve_vst2qv8hf (__addr
, __rv
.__o
);
19852 __extension__
extern __inline
void
19853 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19854 __arm_vst1q_p_f16 (float16_t
* __addr
, float16x8_t __value
, mve_pred16_t __p
)
19856 return vstrhq_p_f16 (__addr
, __value
, __p
);
19859 __extension__
extern __inline float32x4x4_t
19860 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19861 __arm_vld4q_f32 (float32_t
const * __addr
)
19863 union { float32x4x4_t __i
; __builtin_neon_xi __o
; } __rv
;
19864 __rv
.__o
= __builtin_mve_vld4qv4sf (__addr
);
19868 __extension__
extern __inline float32x4x2_t
19869 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19870 __arm_vld2q_f32 (float32_t
const * __addr
)
19872 union { float32x4x2_t __i
; __builtin_neon_oi __o
; } __rv
;
19873 __rv
.__o
= __builtin_mve_vld2qv4sf (__addr
);
19877 __extension__
extern __inline float32x4_t
19878 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19879 __arm_vld1q_z_f32 (float32_t
const *__base
, mve_pred16_t __p
)
19881 return vldrwq_z_f32 (__base
, __p
);
19884 __extension__
extern __inline
void
19885 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19886 __arm_vst2q_f32 (float32_t
* __addr
, float32x4x2_t __value
)
19888 union { float32x4x2_t __i
; __builtin_neon_oi __o
; } __rv
;
19889 __rv
.__i
= __value
;
19890 __builtin_mve_vst2qv4sf (__addr
, __rv
.__o
);
19893 __extension__
extern __inline
void
19894 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19895 __arm_vst1q_p_f32 (float32_t
* __addr
, float32x4_t __value
, mve_pred16_t __p
)
19897 return vstrwq_p_f32 (__addr
, __value
, __p
);
19900 __extension__
extern __inline float16x8_t
19901 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19902 __arm_vsetq_lane_f16 (float16_t __a
, float16x8_t __b
, const int __idx
)
19904 __ARM_CHECK_LANEQ (__b
, __idx
);
19905 __b
[__ARM_LANEQ(__b
,__idx
)] = __a
;
19909 __extension__
extern __inline float32x4_t
19910 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19911 __arm_vsetq_lane_f32 (float32_t __a
, float32x4_t __b
, const int __idx
)
19913 __ARM_CHECK_LANEQ (__b
, __idx
);
19914 __b
[__ARM_LANEQ(__b
,__idx
)] = __a
;
19918 __extension__
extern __inline float16_t
19919 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19920 __arm_vgetq_lane_f16 (float16x8_t __a
, const int __idx
)
19922 __ARM_CHECK_LANEQ (__a
, __idx
);
19923 return __a
[__ARM_LANEQ(__a
,__idx
)];
19926 __extension__
extern __inline float32_t
19927 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19928 __arm_vgetq_lane_f32 (float32x4_t __a
, const int __idx
)
19930 __ARM_CHECK_LANEQ (__a
, __idx
);
19931 return __a
[__ARM_LANEQ(__a
,__idx
)];
19936 __extension__
extern __inline
void
19937 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19938 __arm_vst4q (int8_t * __addr
, int8x16x4_t __value
)
19940 __arm_vst4q_s8 (__addr
, __value
);
19943 __extension__
extern __inline
void
19944 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19945 __arm_vst4q (int16_t * __addr
, int16x8x4_t __value
)
19947 __arm_vst4q_s16 (__addr
, __value
);
19950 __extension__
extern __inline
void
19951 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19952 __arm_vst4q (int32_t * __addr
, int32x4x4_t __value
)
19954 __arm_vst4q_s32 (__addr
, __value
);
19957 __extension__
extern __inline
void
19958 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19959 __arm_vst4q (uint8_t * __addr
, uint8x16x4_t __value
)
19961 __arm_vst4q_u8 (__addr
, __value
);
19964 __extension__
extern __inline
void
19965 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19966 __arm_vst4q (uint16_t * __addr
, uint16x8x4_t __value
)
19968 __arm_vst4q_u16 (__addr
, __value
);
19971 __extension__
extern __inline
void
19972 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19973 __arm_vst4q (uint32_t * __addr
, uint32x4x4_t __value
)
19975 __arm_vst4q_u32 (__addr
, __value
);
19978 __extension__
extern __inline int8x16_t
19979 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19980 __arm_vdupq_n (int8_t __a
)
19982 return __arm_vdupq_n_s8 (__a
);
19985 __extension__
extern __inline int16x8_t
19986 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19987 __arm_vdupq_n (int16_t __a
)
19989 return __arm_vdupq_n_s16 (__a
);
19992 __extension__
extern __inline int32x4_t
19993 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
19994 __arm_vdupq_n (int32_t __a
)
19996 return __arm_vdupq_n_s32 (__a
);
19999 __extension__
extern __inline int8x16_t
20000 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20001 __arm_vabsq (int8x16_t __a
)
20003 return __arm_vabsq_s8 (__a
);
20006 __extension__
extern __inline int16x8_t
20007 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20008 __arm_vabsq (int16x8_t __a
)
20010 return __arm_vabsq_s16 (__a
);
20013 __extension__
extern __inline int32x4_t
20014 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20015 __arm_vabsq (int32x4_t __a
)
20017 return __arm_vabsq_s32 (__a
);
20020 __extension__
extern __inline int8x16_t
20021 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20022 __arm_vclsq (int8x16_t __a
)
20024 return __arm_vclsq_s8 (__a
);
20027 __extension__
extern __inline int16x8_t
20028 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20029 __arm_vclsq (int16x8_t __a
)
20031 return __arm_vclsq_s16 (__a
);
20034 __extension__
extern __inline int32x4_t
20035 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20036 __arm_vclsq (int32x4_t __a
)
20038 return __arm_vclsq_s32 (__a
);
20041 __extension__
extern __inline int8x16_t
20042 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20043 __arm_vclzq (int8x16_t __a
)
20045 return __arm_vclzq_s8 (__a
);
20048 __extension__
extern __inline int16x8_t
20049 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20050 __arm_vclzq (int16x8_t __a
)
20052 return __arm_vclzq_s16 (__a
);
20055 __extension__
extern __inline int32x4_t
20056 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20057 __arm_vclzq (int32x4_t __a
)
20059 return __arm_vclzq_s32 (__a
);
20062 __extension__
extern __inline int8x16_t
20063 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20064 __arm_vnegq (int8x16_t __a
)
20066 return __arm_vnegq_s8 (__a
);
20069 __extension__
extern __inline int16x8_t
20070 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20071 __arm_vnegq (int16x8_t __a
)
20073 return __arm_vnegq_s16 (__a
);
20076 __extension__
extern __inline int32x4_t
20077 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20078 __arm_vnegq (int32x4_t __a
)
20080 return __arm_vnegq_s32 (__a
);
20083 __extension__
extern __inline
int64_t
20084 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20085 __arm_vaddlvq (int32x4_t __a
)
20087 return __arm_vaddlvq_s32 (__a
);
20090 __extension__
extern __inline
int32_t
20091 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20092 __arm_vaddvq (int8x16_t __a
)
20094 return __arm_vaddvq_s8 (__a
);
20097 __extension__
extern __inline
int32_t
20098 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20099 __arm_vaddvq (int16x8_t __a
)
20101 return __arm_vaddvq_s16 (__a
);
20104 __extension__
extern __inline
int32_t
20105 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20106 __arm_vaddvq (int32x4_t __a
)
20108 return __arm_vaddvq_s32 (__a
);
20111 __extension__
extern __inline int16x8_t
20112 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20113 __arm_vmovlbq (int8x16_t __a
)
20115 return __arm_vmovlbq_s8 (__a
);
20118 __extension__
extern __inline int32x4_t
20119 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20120 __arm_vmovlbq (int16x8_t __a
)
20122 return __arm_vmovlbq_s16 (__a
);
20125 __extension__
extern __inline int16x8_t
20126 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20127 __arm_vmovltq (int8x16_t __a
)
20129 return __arm_vmovltq_s8 (__a
);
20132 __extension__
extern __inline int32x4_t
20133 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20134 __arm_vmovltq (int16x8_t __a
)
20136 return __arm_vmovltq_s16 (__a
);
20139 __extension__
extern __inline int8x16_t
20140 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20141 __arm_vmvnq (int8x16_t __a
)
20143 return __arm_vmvnq_s8 (__a
);
20146 __extension__
extern __inline int16x8_t
20147 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20148 __arm_vmvnq (int16x8_t __a
)
20150 return __arm_vmvnq_s16 (__a
);
20153 __extension__
extern __inline int32x4_t
20154 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20155 __arm_vmvnq (int32x4_t __a
)
20157 return __arm_vmvnq_s32 (__a
);
20160 __extension__
extern __inline int8x16_t
20161 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20162 __arm_vrev16q (int8x16_t __a
)
20164 return __arm_vrev16q_s8 (__a
);
20167 __extension__
extern __inline int8x16_t
20168 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20169 __arm_vrev32q (int8x16_t __a
)
20171 return __arm_vrev32q_s8 (__a
);
20174 __extension__
extern __inline int16x8_t
20175 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20176 __arm_vrev32q (int16x8_t __a
)
20178 return __arm_vrev32q_s16 (__a
);
20181 __extension__
extern __inline int8x16_t
20182 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20183 __arm_vrev64q (int8x16_t __a
)
20185 return __arm_vrev64q_s8 (__a
);
20188 __extension__
extern __inline int16x8_t
20189 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20190 __arm_vrev64q (int16x8_t __a
)
20192 return __arm_vrev64q_s16 (__a
);
20195 __extension__
extern __inline int32x4_t
20196 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20197 __arm_vrev64q (int32x4_t __a
)
20199 return __arm_vrev64q_s32 (__a
);
20202 __extension__
extern __inline int8x16_t
20203 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20204 __arm_vqabsq (int8x16_t __a
)
20206 return __arm_vqabsq_s8 (__a
);
20209 __extension__
extern __inline int16x8_t
20210 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20211 __arm_vqabsq (int16x8_t __a
)
20213 return __arm_vqabsq_s16 (__a
);
20216 __extension__
extern __inline int32x4_t
20217 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20218 __arm_vqabsq (int32x4_t __a
)
20220 return __arm_vqabsq_s32 (__a
);
20223 __extension__
extern __inline int8x16_t
20224 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20225 __arm_vqnegq (int8x16_t __a
)
20227 return __arm_vqnegq_s8 (__a
);
20230 __extension__
extern __inline int16x8_t
20231 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20232 __arm_vqnegq (int16x8_t __a
)
20234 return __arm_vqnegq_s16 (__a
);
20237 __extension__
extern __inline int32x4_t
20238 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20239 __arm_vqnegq (int32x4_t __a
)
20241 return __arm_vqnegq_s32 (__a
);
20244 __extension__
extern __inline uint8x16_t
20245 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20246 __arm_vrev64q (uint8x16_t __a
)
20248 return __arm_vrev64q_u8 (__a
);
20251 __extension__
extern __inline uint16x8_t
20252 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20253 __arm_vrev64q (uint16x8_t __a
)
20255 return __arm_vrev64q_u16 (__a
);
20258 __extension__
extern __inline uint32x4_t
20259 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20260 __arm_vrev64q (uint32x4_t __a
)
20262 return __arm_vrev64q_u32 (__a
);
20265 __extension__
extern __inline uint8x16_t
20266 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20267 __arm_vmvnq (uint8x16_t __a
)
20269 return __arm_vmvnq_u8 (__a
);
20272 __extension__
extern __inline uint16x8_t
20273 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20274 __arm_vmvnq (uint16x8_t __a
)
20276 return __arm_vmvnq_u16 (__a
);
20279 __extension__
extern __inline uint32x4_t
20280 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20281 __arm_vmvnq (uint32x4_t __a
)
20283 return __arm_vmvnq_u32 (__a
);
20286 __extension__
extern __inline uint8x16_t
20287 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20288 __arm_vdupq_n (uint8_t __a
)
20290 return __arm_vdupq_n_u8 (__a
);
20293 __extension__
extern __inline uint16x8_t
20294 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20295 __arm_vdupq_n (uint16_t __a
)
20297 return __arm_vdupq_n_u16 (__a
);
20300 __extension__
extern __inline uint32x4_t
20301 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20302 __arm_vdupq_n (uint32_t __a
)
20304 return __arm_vdupq_n_u32 (__a
);
20307 __extension__
extern __inline uint8x16_t
20308 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20309 __arm_vclzq (uint8x16_t __a
)
20311 return __arm_vclzq_u8 (__a
);
20314 __extension__
extern __inline uint16x8_t
20315 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20316 __arm_vclzq (uint16x8_t __a
)
20318 return __arm_vclzq_u16 (__a
);
20321 __extension__
extern __inline uint32x4_t
20322 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20323 __arm_vclzq (uint32x4_t __a
)
20325 return __arm_vclzq_u32 (__a
);
20328 __extension__
extern __inline
uint32_t
20329 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20330 __arm_vaddvq (uint8x16_t __a
)
20332 return __arm_vaddvq_u8 (__a
);
20335 __extension__
extern __inline
uint32_t
20336 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20337 __arm_vaddvq (uint16x8_t __a
)
20339 return __arm_vaddvq_u16 (__a
);
20342 __extension__
extern __inline
uint32_t
20343 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20344 __arm_vaddvq (uint32x4_t __a
)
20346 return __arm_vaddvq_u32 (__a
);
20349 __extension__
extern __inline uint8x16_t
20350 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20351 __arm_vrev32q (uint8x16_t __a
)
20353 return __arm_vrev32q_u8 (__a
);
20356 __extension__
extern __inline uint16x8_t
20357 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20358 __arm_vrev32q (uint16x8_t __a
)
20360 return __arm_vrev32q_u16 (__a
);
20363 __extension__
extern __inline uint16x8_t
20364 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20365 __arm_vmovltq (uint8x16_t __a
)
20367 return __arm_vmovltq_u8 (__a
);
20370 __extension__
extern __inline uint32x4_t
20371 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20372 __arm_vmovltq (uint16x8_t __a
)
20374 return __arm_vmovltq_u16 (__a
);
20377 __extension__
extern __inline uint16x8_t
20378 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20379 __arm_vmovlbq (uint8x16_t __a
)
20381 return __arm_vmovlbq_u8 (__a
);
20384 __extension__
extern __inline uint32x4_t
20385 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20386 __arm_vmovlbq (uint16x8_t __a
)
20388 return __arm_vmovlbq_u16 (__a
);
20391 __extension__
extern __inline uint8x16_t
20392 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20393 __arm_vrev16q (uint8x16_t __a
)
20395 return __arm_vrev16q_u8 (__a
);
20398 __extension__
extern __inline
uint64_t
20399 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20400 __arm_vaddlvq (uint32x4_t __a
)
20402 return __arm_vaddlvq_u32 (__a
);
20405 __extension__
extern __inline int8x16_t
20406 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20407 __arm_vshrq (int8x16_t __a
, const int __imm
)
20409 return __arm_vshrq_n_s8 (__a
, __imm
);
20412 __extension__
extern __inline int16x8_t
20413 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20414 __arm_vshrq (int16x8_t __a
, const int __imm
)
20416 return __arm_vshrq_n_s16 (__a
, __imm
);
20419 __extension__
extern __inline int32x4_t
20420 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20421 __arm_vshrq (int32x4_t __a
, const int __imm
)
20423 return __arm_vshrq_n_s32 (__a
, __imm
);
20426 __extension__
extern __inline uint8x16_t
20427 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20428 __arm_vshrq (uint8x16_t __a
, const int __imm
)
20430 return __arm_vshrq_n_u8 (__a
, __imm
);
20433 __extension__
extern __inline uint16x8_t
20434 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20435 __arm_vshrq (uint16x8_t __a
, const int __imm
)
20437 return __arm_vshrq_n_u16 (__a
, __imm
);
20440 __extension__
extern __inline uint32x4_t
20441 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20442 __arm_vshrq (uint32x4_t __a
, const int __imm
)
20444 return __arm_vshrq_n_u32 (__a
, __imm
);
20447 __extension__
extern __inline
int64_t
20448 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20449 __arm_vaddlvq_p (int32x4_t __a
, mve_pred16_t __p
)
20451 return __arm_vaddlvq_p_s32 (__a
, __p
);
20454 __extension__
extern __inline
uint64_t
20455 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20456 __arm_vaddlvq_p (uint32x4_t __a
, mve_pred16_t __p
)
20458 return __arm_vaddlvq_p_u32 (__a
, __p
);
20461 __extension__
extern __inline
int32_t
20462 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20463 __arm_vcmpneq (int8x16_t __a
, int8x16_t __b
)
20465 return __arm_vcmpneq_s8 (__a
, __b
);
20468 __extension__
extern __inline mve_pred16_t
20469 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20470 __arm_vcmpneq (int16x8_t __a
, int16x8_t __b
)
20472 return __arm_vcmpneq_s16 (__a
, __b
);
20475 __extension__
extern __inline mve_pred16_t
20476 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20477 __arm_vcmpneq (int32x4_t __a
, int32x4_t __b
)
20479 return __arm_vcmpneq_s32 (__a
, __b
);
20482 __extension__
extern __inline mve_pred16_t
20483 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20484 __arm_vcmpneq (uint8x16_t __a
, uint8x16_t __b
)
20486 return __arm_vcmpneq_u8 (__a
, __b
);
20489 __extension__
extern __inline mve_pred16_t
20490 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20491 __arm_vcmpneq (uint16x8_t __a
, uint16x8_t __b
)
20493 return __arm_vcmpneq_u16 (__a
, __b
);
20496 __extension__
extern __inline mve_pred16_t
20497 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20498 __arm_vcmpneq (uint32x4_t __a
, uint32x4_t __b
)
20500 return __arm_vcmpneq_u32 (__a
, __b
);
20503 __extension__
extern __inline int8x16_t
20504 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20505 __arm_vshlq (int8x16_t __a
, int8x16_t __b
)
20507 return __arm_vshlq_s8 (__a
, __b
);
20510 __extension__
extern __inline int16x8_t
20511 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20512 __arm_vshlq (int16x8_t __a
, int16x8_t __b
)
20514 return __arm_vshlq_s16 (__a
, __b
);
20517 __extension__
extern __inline int32x4_t
20518 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20519 __arm_vshlq (int32x4_t __a
, int32x4_t __b
)
20521 return __arm_vshlq_s32 (__a
, __b
);
20524 __extension__
extern __inline uint8x16_t
20525 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20526 __arm_vshlq (uint8x16_t __a
, int8x16_t __b
)
20528 return __arm_vshlq_u8 (__a
, __b
);
20531 __extension__
extern __inline uint16x8_t
20532 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20533 __arm_vshlq (uint16x8_t __a
, int16x8_t __b
)
20535 return __arm_vshlq_u16 (__a
, __b
);
20538 __extension__
extern __inline uint32x4_t
20539 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20540 __arm_vshlq (uint32x4_t __a
, int32x4_t __b
)
20542 return __arm_vshlq_u32 (__a
, __b
);
20545 __extension__
extern __inline uint8x16_t
20546 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20547 __arm_vsubq (uint8x16_t __a
, uint8x16_t __b
)
20549 return __arm_vsubq_u8 (__a
, __b
);
20552 __extension__
extern __inline uint8x16_t
20553 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20554 __arm_vsubq (uint8x16_t __a
, uint8_t __b
)
20556 return __arm_vsubq_n_u8 (__a
, __b
);
20559 __extension__
extern __inline uint8x16_t
20560 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20561 __arm_vrmulhq (uint8x16_t __a
, uint8x16_t __b
)
20563 return __arm_vrmulhq_u8 (__a
, __b
);
20566 __extension__
extern __inline uint8x16_t
20567 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20568 __arm_vrhaddq (uint8x16_t __a
, uint8x16_t __b
)
20570 return __arm_vrhaddq_u8 (__a
, __b
);
20573 __extension__
extern __inline uint8x16_t
20574 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20575 __arm_vqsubq (uint8x16_t __a
, uint8x16_t __b
)
20577 return __arm_vqsubq_u8 (__a
, __b
);
20580 __extension__
extern __inline uint8x16_t
20581 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20582 __arm_vqsubq (uint8x16_t __a
, uint8_t __b
)
20584 return __arm_vqsubq_n_u8 (__a
, __b
);
20587 __extension__
extern __inline uint8x16_t
20588 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20589 __arm_vqaddq (uint8x16_t __a
, uint8x16_t __b
)
20591 return __arm_vqaddq_u8 (__a
, __b
);
20594 __extension__
extern __inline uint8x16_t
20595 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20596 __arm_vqaddq (uint8x16_t __a
, uint8_t __b
)
20598 return __arm_vqaddq_n_u8 (__a
, __b
);
20601 __extension__
extern __inline uint8x16_t
20602 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20603 __arm_vorrq (uint8x16_t __a
, uint8x16_t __b
)
20605 return __arm_vorrq_u8 (__a
, __b
);
20608 __extension__
extern __inline uint8x16_t
20609 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20610 __arm_vornq (uint8x16_t __a
, uint8x16_t __b
)
20612 return __arm_vornq_u8 (__a
, __b
);
20615 __extension__
extern __inline uint8x16_t
20616 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20617 __arm_vmulq (uint8x16_t __a
, uint8x16_t __b
)
20619 return __arm_vmulq_u8 (__a
, __b
);
20622 __extension__
extern __inline uint8x16_t
20623 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20624 __arm_vmulq (uint8x16_t __a
, uint8_t __b
)
20626 return __arm_vmulq_n_u8 (__a
, __b
);
20629 __extension__
extern __inline uint16x8_t
20630 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20631 __arm_vmulltq_int (uint8x16_t __a
, uint8x16_t __b
)
20633 return __arm_vmulltq_int_u8 (__a
, __b
);
20636 __extension__
extern __inline uint16x8_t
20637 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20638 __arm_vmullbq_int (uint8x16_t __a
, uint8x16_t __b
)
20640 return __arm_vmullbq_int_u8 (__a
, __b
);
20643 __extension__
extern __inline uint8x16_t
20644 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20645 __arm_vmulhq (uint8x16_t __a
, uint8x16_t __b
)
20647 return __arm_vmulhq_u8 (__a
, __b
);
20650 __extension__
extern __inline
uint32_t
20651 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20652 __arm_vmladavq (uint8x16_t __a
, uint8x16_t __b
)
20654 return __arm_vmladavq_u8 (__a
, __b
);
20657 __extension__
extern __inline
uint8_t
20658 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20659 __arm_vminvq (uint8_t __a
, uint8x16_t __b
)
20661 return __arm_vminvq_u8 (__a
, __b
);
20664 __extension__
extern __inline uint8x16_t
20665 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20666 __arm_vminq (uint8x16_t __a
, uint8x16_t __b
)
20668 return __arm_vminq_u8 (__a
, __b
);
20671 __extension__
extern __inline
uint8_t
20672 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20673 __arm_vmaxvq (uint8_t __a
, uint8x16_t __b
)
20675 return __arm_vmaxvq_u8 (__a
, __b
);
20678 __extension__
extern __inline uint8x16_t
20679 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20680 __arm_vmaxq (uint8x16_t __a
, uint8x16_t __b
)
20682 return __arm_vmaxq_u8 (__a
, __b
);
20685 __extension__
extern __inline uint8x16_t
20686 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20687 __arm_vhsubq (uint8x16_t __a
, uint8x16_t __b
)
20689 return __arm_vhsubq_u8 (__a
, __b
);
20692 __extension__
extern __inline uint8x16_t
20693 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20694 __arm_vhsubq (uint8x16_t __a
, uint8_t __b
)
20696 return __arm_vhsubq_n_u8 (__a
, __b
);
20699 __extension__
extern __inline uint8x16_t
20700 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20701 __arm_vhaddq (uint8x16_t __a
, uint8x16_t __b
)
20703 return __arm_vhaddq_u8 (__a
, __b
);
20706 __extension__
extern __inline uint8x16_t
20707 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20708 __arm_vhaddq (uint8x16_t __a
, uint8_t __b
)
20710 return __arm_vhaddq_n_u8 (__a
, __b
);
20713 __extension__
extern __inline uint8x16_t
20714 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20715 __arm_veorq (uint8x16_t __a
, uint8x16_t __b
)
20717 return __arm_veorq_u8 (__a
, __b
);
20720 __extension__
extern __inline mve_pred16_t
20721 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20722 __arm_vcmpneq (uint8x16_t __a
, uint8_t __b
)
20724 return __arm_vcmpneq_n_u8 (__a
, __b
);
20727 __extension__
extern __inline mve_pred16_t
20728 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20729 __arm_vcmphiq (uint8x16_t __a
, uint8x16_t __b
)
20731 return __arm_vcmphiq_u8 (__a
, __b
);
20734 __extension__
extern __inline mve_pred16_t
20735 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20736 __arm_vcmphiq (uint8x16_t __a
, uint8_t __b
)
20738 return __arm_vcmphiq_n_u8 (__a
, __b
);
20741 __extension__
extern __inline mve_pred16_t
20742 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20743 __arm_vcmpeqq (uint8x16_t __a
, uint8x16_t __b
)
20745 return __arm_vcmpeqq_u8 (__a
, __b
);
20748 __extension__
extern __inline mve_pred16_t
20749 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20750 __arm_vcmpeqq (uint8x16_t __a
, uint8_t __b
)
20752 return __arm_vcmpeqq_n_u8 (__a
, __b
);
20755 __extension__
extern __inline mve_pred16_t
20756 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20757 __arm_vcmpcsq (uint8x16_t __a
, uint8x16_t __b
)
20759 return __arm_vcmpcsq_u8 (__a
, __b
);
20762 __extension__
extern __inline mve_pred16_t
20763 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20764 __arm_vcmpcsq (uint8x16_t __a
, uint8_t __b
)
20766 return __arm_vcmpcsq_n_u8 (__a
, __b
);
20769 __extension__
extern __inline uint8x16_t
20770 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20771 __arm_vcaddq_rot90 (uint8x16_t __a
, uint8x16_t __b
)
20773 return __arm_vcaddq_rot90_u8 (__a
, __b
);
20776 __extension__
extern __inline uint8x16_t
20777 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20778 __arm_vcaddq_rot270 (uint8x16_t __a
, uint8x16_t __b
)
20780 return __arm_vcaddq_rot270_u8 (__a
, __b
);
20783 __extension__
extern __inline uint8x16_t
20784 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20785 __arm_vbicq (uint8x16_t __a
, uint8x16_t __b
)
20787 return __arm_vbicq_u8 (__a
, __b
);
20790 __extension__
extern __inline uint8x16_t
20791 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20792 __arm_vandq (uint8x16_t __a
, uint8x16_t __b
)
20794 return __arm_vandq_u8 (__a
, __b
);
20797 __extension__
extern __inline
uint32_t
20798 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20799 __arm_vaddvq_p (uint8x16_t __a
, mve_pred16_t __p
)
20801 return __arm_vaddvq_p_u8 (__a
, __p
);
20804 __extension__
extern __inline
uint32_t
20805 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20806 __arm_vaddvaq (uint32_t __a
, uint8x16_t __b
)
20808 return __arm_vaddvaq_u8 (__a
, __b
);
20811 __extension__
extern __inline uint8x16_t
20812 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20813 __arm_vaddq (uint8x16_t __a
, uint8_t __b
)
20815 return __arm_vaddq_n_u8 (__a
, __b
);
20818 __extension__
extern __inline uint8x16_t
20819 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20820 __arm_vabdq (uint8x16_t __a
, uint8x16_t __b
)
20822 return __arm_vabdq_u8 (__a
, __b
);
20825 __extension__
extern __inline uint8x16_t
20826 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20827 __arm_vshlq_r (uint8x16_t __a
, int32_t __b
)
20829 return __arm_vshlq_r_u8 (__a
, __b
);
20832 __extension__
extern __inline uint8x16_t
20833 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20834 __arm_vrshlq (uint8x16_t __a
, int8x16_t __b
)
20836 return __arm_vrshlq_u8 (__a
, __b
);
20839 __extension__
extern __inline uint8x16_t
20840 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20841 __arm_vrshlq (uint8x16_t __a
, int32_t __b
)
20843 return __arm_vrshlq_n_u8 (__a
, __b
);
20846 __extension__
extern __inline uint8x16_t
20847 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20848 __arm_vqshlq (uint8x16_t __a
, int8x16_t __b
)
20850 return __arm_vqshlq_u8 (__a
, __b
);
20853 __extension__
extern __inline uint8x16_t
20854 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20855 __arm_vqshlq_r (uint8x16_t __a
, int32_t __b
)
20857 return __arm_vqshlq_r_u8 (__a
, __b
);
20860 __extension__
extern __inline uint8x16_t
20861 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20862 __arm_vqrshlq (uint8x16_t __a
, int8x16_t __b
)
20864 return __arm_vqrshlq_u8 (__a
, __b
);
20867 __extension__
extern __inline uint8x16_t
20868 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20869 __arm_vqrshlq (uint8x16_t __a
, int32_t __b
)
20871 return __arm_vqrshlq_n_u8 (__a
, __b
);
20874 __extension__
extern __inline
uint8_t
20875 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20876 __arm_vminavq (uint8_t __a
, int8x16_t __b
)
20878 return __arm_vminavq_s8 (__a
, __b
);
20881 __extension__
extern __inline uint8x16_t
20882 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20883 __arm_vminaq (uint8x16_t __a
, int8x16_t __b
)
20885 return __arm_vminaq_s8 (__a
, __b
);
20888 __extension__
extern __inline
uint8_t
20889 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20890 __arm_vmaxavq (uint8_t __a
, int8x16_t __b
)
20892 return __arm_vmaxavq_s8 (__a
, __b
);
20895 __extension__
extern __inline uint8x16_t
20896 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20897 __arm_vmaxaq (uint8x16_t __a
, int8x16_t __b
)
20899 return __arm_vmaxaq_s8 (__a
, __b
);
20902 __extension__
extern __inline uint8x16_t
20903 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20904 __arm_vbrsrq (uint8x16_t __a
, int32_t __b
)
20906 return __arm_vbrsrq_n_u8 (__a
, __b
);
20909 __extension__
extern __inline uint8x16_t
20910 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20911 __arm_vshlq_n (uint8x16_t __a
, const int __imm
)
20913 return __arm_vshlq_n_u8 (__a
, __imm
);
20916 __extension__
extern __inline uint8x16_t
20917 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20918 __arm_vrshrq (uint8x16_t __a
, const int __imm
)
20920 return __arm_vrshrq_n_u8 (__a
, __imm
);
20923 __extension__
extern __inline uint8x16_t
20924 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20925 __arm_vqshlq_n (uint8x16_t __a
, const int __imm
)
20927 return __arm_vqshlq_n_u8 (__a
, __imm
);
20930 __extension__
extern __inline mve_pred16_t
20931 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20932 __arm_vcmpneq (int8x16_t __a
, int8_t __b
)
20934 return __arm_vcmpneq_n_s8 (__a
, __b
);
20937 __extension__
extern __inline mve_pred16_t
20938 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20939 __arm_vcmpltq (int8x16_t __a
, int8x16_t __b
)
20941 return __arm_vcmpltq_s8 (__a
, __b
);
20944 __extension__
extern __inline mve_pred16_t
20945 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20946 __arm_vcmpltq (int8x16_t __a
, int8_t __b
)
20948 return __arm_vcmpltq_n_s8 (__a
, __b
);
20951 __extension__
extern __inline mve_pred16_t
20952 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20953 __arm_vcmpleq (int8x16_t __a
, int8x16_t __b
)
20955 return __arm_vcmpleq_s8 (__a
, __b
);
20958 __extension__
extern __inline mve_pred16_t
20959 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20960 __arm_vcmpleq (int8x16_t __a
, int8_t __b
)
20962 return __arm_vcmpleq_n_s8 (__a
, __b
);
20965 __extension__
extern __inline mve_pred16_t
20966 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20967 __arm_vcmpgtq (int8x16_t __a
, int8x16_t __b
)
20969 return __arm_vcmpgtq_s8 (__a
, __b
);
20972 __extension__
extern __inline mve_pred16_t
20973 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20974 __arm_vcmpgtq (int8x16_t __a
, int8_t __b
)
20976 return __arm_vcmpgtq_n_s8 (__a
, __b
);
20979 __extension__
extern __inline mve_pred16_t
20980 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20981 __arm_vcmpgeq (int8x16_t __a
, int8x16_t __b
)
20983 return __arm_vcmpgeq_s8 (__a
, __b
);
20986 __extension__
extern __inline mve_pred16_t
20987 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20988 __arm_vcmpgeq (int8x16_t __a
, int8_t __b
)
20990 return __arm_vcmpgeq_n_s8 (__a
, __b
);
20993 __extension__
extern __inline mve_pred16_t
20994 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
20995 __arm_vcmpeqq (int8x16_t __a
, int8x16_t __b
)
20997 return __arm_vcmpeqq_s8 (__a
, __b
);
21000 __extension__
extern __inline mve_pred16_t
21001 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21002 __arm_vcmpeqq (int8x16_t __a
, int8_t __b
)
21004 return __arm_vcmpeqq_n_s8 (__a
, __b
);
21007 __extension__
extern __inline uint8x16_t
21008 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21009 __arm_vqshluq (int8x16_t __a
, const int __imm
)
21011 return __arm_vqshluq_n_s8 (__a
, __imm
);
21014 __extension__
extern __inline
int32_t
21015 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21016 __arm_vaddvq_p (int8x16_t __a
, mve_pred16_t __p
)
21018 return __arm_vaddvq_p_s8 (__a
, __p
);
21021 __extension__
extern __inline int8x16_t
21022 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21023 __arm_vsubq (int8x16_t __a
, int8x16_t __b
)
21025 return __arm_vsubq_s8 (__a
, __b
);
21028 __extension__
extern __inline int8x16_t
21029 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21030 __arm_vsubq (int8x16_t __a
, int8_t __b
)
21032 return __arm_vsubq_n_s8 (__a
, __b
);
21035 __extension__
extern __inline int8x16_t
21036 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21037 __arm_vshlq_r (int8x16_t __a
, int32_t __b
)
21039 return __arm_vshlq_r_s8 (__a
, __b
);
21042 __extension__
extern __inline int8x16_t
21043 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21044 __arm_vrshlq (int8x16_t __a
, int8x16_t __b
)
21046 return __arm_vrshlq_s8 (__a
, __b
);
21049 __extension__
extern __inline int8x16_t
21050 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21051 __arm_vrshlq (int8x16_t __a
, int32_t __b
)
21053 return __arm_vrshlq_n_s8 (__a
, __b
);
21056 __extension__
extern __inline int8x16_t
21057 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21058 __arm_vrmulhq (int8x16_t __a
, int8x16_t __b
)
21060 return __arm_vrmulhq_s8 (__a
, __b
);
21063 __extension__
extern __inline int8x16_t
21064 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21065 __arm_vrhaddq (int8x16_t __a
, int8x16_t __b
)
21067 return __arm_vrhaddq_s8 (__a
, __b
);
21070 __extension__
extern __inline int8x16_t
21071 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21072 __arm_vqsubq (int8x16_t __a
, int8x16_t __b
)
21074 return __arm_vqsubq_s8 (__a
, __b
);
21077 __extension__
extern __inline int8x16_t
21078 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21079 __arm_vqsubq (int8x16_t __a
, int8_t __b
)
21081 return __arm_vqsubq_n_s8 (__a
, __b
);
21084 __extension__
extern __inline int8x16_t
21085 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21086 __arm_vqshlq (int8x16_t __a
, int8x16_t __b
)
21088 return __arm_vqshlq_s8 (__a
, __b
);
21091 __extension__
extern __inline int8x16_t
21092 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21093 __arm_vqshlq_r (int8x16_t __a
, int32_t __b
)
21095 return __arm_vqshlq_r_s8 (__a
, __b
);
21098 __extension__
extern __inline int8x16_t
21099 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21100 __arm_vqrshlq (int8x16_t __a
, int8x16_t __b
)
21102 return __arm_vqrshlq_s8 (__a
, __b
);
21105 __extension__
extern __inline int8x16_t
21106 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21107 __arm_vqrshlq (int8x16_t __a
, int32_t __b
)
21109 return __arm_vqrshlq_n_s8 (__a
, __b
);
21112 __extension__
extern __inline int8x16_t
21113 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21114 __arm_vqrdmulhq (int8x16_t __a
, int8x16_t __b
)
21116 return __arm_vqrdmulhq_s8 (__a
, __b
);
21119 __extension__
extern __inline int8x16_t
21120 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21121 __arm_vqrdmulhq (int8x16_t __a
, int8_t __b
)
21123 return __arm_vqrdmulhq_n_s8 (__a
, __b
);
21126 __extension__
extern __inline int8x16_t
21127 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21128 __arm_vqdmulhq (int8x16_t __a
, int8x16_t __b
)
21130 return __arm_vqdmulhq_s8 (__a
, __b
);
21133 __extension__
extern __inline int8x16_t
21134 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21135 __arm_vqdmulhq (int8x16_t __a
, int8_t __b
)
21137 return __arm_vqdmulhq_n_s8 (__a
, __b
);
21140 __extension__
extern __inline int8x16_t
21141 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21142 __arm_vqaddq (int8x16_t __a
, int8x16_t __b
)
21144 return __arm_vqaddq_s8 (__a
, __b
);
21147 __extension__
extern __inline int8x16_t
21148 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21149 __arm_vqaddq (int8x16_t __a
, int8_t __b
)
21151 return __arm_vqaddq_n_s8 (__a
, __b
);
21154 __extension__
extern __inline int8x16_t
21155 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21156 __arm_vorrq (int8x16_t __a
, int8x16_t __b
)
21158 return __arm_vorrq_s8 (__a
, __b
);
21161 __extension__
extern __inline int8x16_t
21162 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21163 __arm_vornq (int8x16_t __a
, int8x16_t __b
)
21165 return __arm_vornq_s8 (__a
, __b
);
21168 __extension__
extern __inline int8x16_t
21169 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21170 __arm_vmulq (int8x16_t __a
, int8x16_t __b
)
21172 return __arm_vmulq_s8 (__a
, __b
);
21175 __extension__
extern __inline int8x16_t
21176 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21177 __arm_vmulq (int8x16_t __a
, int8_t __b
)
21179 return __arm_vmulq_n_s8 (__a
, __b
);
21182 __extension__
extern __inline int16x8_t
21183 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21184 __arm_vmulltq_int (int8x16_t __a
, int8x16_t __b
)
21186 return __arm_vmulltq_int_s8 (__a
, __b
);
21189 __extension__
extern __inline int16x8_t
21190 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21191 __arm_vmullbq_int (int8x16_t __a
, int8x16_t __b
)
21193 return __arm_vmullbq_int_s8 (__a
, __b
);
21196 __extension__
extern __inline int8x16_t
21197 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21198 __arm_vmulhq (int8x16_t __a
, int8x16_t __b
)
21200 return __arm_vmulhq_s8 (__a
, __b
);
21203 __extension__
extern __inline
int32_t
21204 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21205 __arm_vmlsdavxq (int8x16_t __a
, int8x16_t __b
)
21207 return __arm_vmlsdavxq_s8 (__a
, __b
);
21210 __extension__
extern __inline
int32_t
21211 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21212 __arm_vmlsdavq (int8x16_t __a
, int8x16_t __b
)
21214 return __arm_vmlsdavq_s8 (__a
, __b
);
21217 __extension__
extern __inline
int32_t
21218 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21219 __arm_vmladavxq (int8x16_t __a
, int8x16_t __b
)
21221 return __arm_vmladavxq_s8 (__a
, __b
);
21224 __extension__
extern __inline
int32_t
21225 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21226 __arm_vmladavq (int8x16_t __a
, int8x16_t __b
)
21228 return __arm_vmladavq_s8 (__a
, __b
);
21231 __extension__
extern __inline
int8_t
21232 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21233 __arm_vminvq (int8_t __a
, int8x16_t __b
)
21235 return __arm_vminvq_s8 (__a
, __b
);
21238 __extension__
extern __inline int8x16_t
21239 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21240 __arm_vminq (int8x16_t __a
, int8x16_t __b
)
21242 return __arm_vminq_s8 (__a
, __b
);
21245 __extension__
extern __inline
int8_t
21246 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21247 __arm_vmaxvq (int8_t __a
, int8x16_t __b
)
21249 return __arm_vmaxvq_s8 (__a
, __b
);
21252 __extension__
extern __inline int8x16_t
21253 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21254 __arm_vmaxq (int8x16_t __a
, int8x16_t __b
)
21256 return __arm_vmaxq_s8 (__a
, __b
);
21259 __extension__
extern __inline int8x16_t
21260 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21261 __arm_vhsubq (int8x16_t __a
, int8x16_t __b
)
21263 return __arm_vhsubq_s8 (__a
, __b
);
21266 __extension__
extern __inline int8x16_t
21267 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21268 __arm_vhsubq (int8x16_t __a
, int8_t __b
)
21270 return __arm_vhsubq_n_s8 (__a
, __b
);
21273 __extension__
extern __inline int8x16_t
21274 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21275 __arm_vhcaddq_rot90 (int8x16_t __a
, int8x16_t __b
)
21277 return __arm_vhcaddq_rot90_s8 (__a
, __b
);
21280 __extension__
extern __inline int8x16_t
21281 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21282 __arm_vhcaddq_rot270 (int8x16_t __a
, int8x16_t __b
)
21284 return __arm_vhcaddq_rot270_s8 (__a
, __b
);
21287 __extension__
extern __inline int8x16_t
21288 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21289 __arm_vhaddq (int8x16_t __a
, int8x16_t __b
)
21291 return __arm_vhaddq_s8 (__a
, __b
);
21294 __extension__
extern __inline int8x16_t
21295 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21296 __arm_vhaddq (int8x16_t __a
, int8_t __b
)
21298 return __arm_vhaddq_n_s8 (__a
, __b
);
21301 __extension__
extern __inline int8x16_t
21302 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21303 __arm_veorq (int8x16_t __a
, int8x16_t __b
)
21305 return __arm_veorq_s8 (__a
, __b
);
21308 __extension__
extern __inline int8x16_t
21309 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21310 __arm_vcaddq_rot90 (int8x16_t __a
, int8x16_t __b
)
21312 return __arm_vcaddq_rot90_s8 (__a
, __b
);
21315 __extension__
extern __inline int8x16_t
21316 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21317 __arm_vcaddq_rot270 (int8x16_t __a
, int8x16_t __b
)
21319 return __arm_vcaddq_rot270_s8 (__a
, __b
);
21322 __extension__
extern __inline int8x16_t
21323 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21324 __arm_vbrsrq (int8x16_t __a
, int32_t __b
)
21326 return __arm_vbrsrq_n_s8 (__a
, __b
);
21329 __extension__
extern __inline int8x16_t
21330 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21331 __arm_vbicq (int8x16_t __a
, int8x16_t __b
)
21333 return __arm_vbicq_s8 (__a
, __b
);
21336 __extension__
extern __inline int8x16_t
21337 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21338 __arm_vandq (int8x16_t __a
, int8x16_t __b
)
21340 return __arm_vandq_s8 (__a
, __b
);
21343 __extension__
extern __inline
int32_t
21344 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21345 __arm_vaddvaq (int32_t __a
, int8x16_t __b
)
21347 return __arm_vaddvaq_s8 (__a
, __b
);
21350 __extension__
extern __inline int8x16_t
21351 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21352 __arm_vaddq (int8x16_t __a
, int8_t __b
)
21354 return __arm_vaddq_n_s8 (__a
, __b
);
21357 __extension__
extern __inline int8x16_t
21358 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21359 __arm_vabdq (int8x16_t __a
, int8x16_t __b
)
21361 return __arm_vabdq_s8 (__a
, __b
);
21364 __extension__
extern __inline int8x16_t
21365 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21366 __arm_vshlq_n (int8x16_t __a
, const int __imm
)
21368 return __arm_vshlq_n_s8 (__a
, __imm
);
21371 __extension__
extern __inline int8x16_t
21372 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21373 __arm_vrshrq (int8x16_t __a
, const int __imm
)
21375 return __arm_vrshrq_n_s8 (__a
, __imm
);
21378 __extension__
extern __inline int8x16_t
21379 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21380 __arm_vqshlq_n (int8x16_t __a
, const int __imm
)
21382 return __arm_vqshlq_n_s8 (__a
, __imm
);
21385 __extension__
extern __inline uint16x8_t
21386 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21387 __arm_vsubq (uint16x8_t __a
, uint16x8_t __b
)
21389 return __arm_vsubq_u16 (__a
, __b
);
21392 __extension__
extern __inline uint16x8_t
21393 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21394 __arm_vsubq (uint16x8_t __a
, uint16_t __b
)
21396 return __arm_vsubq_n_u16 (__a
, __b
);
21399 __extension__
extern __inline uint16x8_t
21400 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21401 __arm_vrmulhq (uint16x8_t __a
, uint16x8_t __b
)
21403 return __arm_vrmulhq_u16 (__a
, __b
);
21406 __extension__
extern __inline uint16x8_t
21407 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21408 __arm_vrhaddq (uint16x8_t __a
, uint16x8_t __b
)
21410 return __arm_vrhaddq_u16 (__a
, __b
);
21413 __extension__
extern __inline uint16x8_t
21414 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21415 __arm_vqsubq (uint16x8_t __a
, uint16x8_t __b
)
21417 return __arm_vqsubq_u16 (__a
, __b
);
21420 __extension__
extern __inline uint16x8_t
21421 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21422 __arm_vqsubq (uint16x8_t __a
, uint16_t __b
)
21424 return __arm_vqsubq_n_u16 (__a
, __b
);
21427 __extension__
extern __inline uint16x8_t
21428 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21429 __arm_vqaddq (uint16x8_t __a
, uint16x8_t __b
)
21431 return __arm_vqaddq_u16 (__a
, __b
);
21434 __extension__
extern __inline uint16x8_t
21435 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21436 __arm_vqaddq (uint16x8_t __a
, uint16_t __b
)
21438 return __arm_vqaddq_n_u16 (__a
, __b
);
21441 __extension__
extern __inline uint16x8_t
21442 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21443 __arm_vorrq (uint16x8_t __a
, uint16x8_t __b
)
21445 return __arm_vorrq_u16 (__a
, __b
);
21448 __extension__
extern __inline uint16x8_t
21449 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21450 __arm_vornq (uint16x8_t __a
, uint16x8_t __b
)
21452 return __arm_vornq_u16 (__a
, __b
);
21455 __extension__
extern __inline uint16x8_t
21456 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21457 __arm_vmulq (uint16x8_t __a
, uint16x8_t __b
)
21459 return __arm_vmulq_u16 (__a
, __b
);
21462 __extension__
extern __inline uint16x8_t
21463 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21464 __arm_vmulq (uint16x8_t __a
, uint16_t __b
)
21466 return __arm_vmulq_n_u16 (__a
, __b
);
21469 __extension__
extern __inline uint32x4_t
21470 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21471 __arm_vmulltq_int (uint16x8_t __a
, uint16x8_t __b
)
21473 return __arm_vmulltq_int_u16 (__a
, __b
);
21476 __extension__
extern __inline uint32x4_t
21477 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21478 __arm_vmullbq_int (uint16x8_t __a
, uint16x8_t __b
)
21480 return __arm_vmullbq_int_u16 (__a
, __b
);
21483 __extension__
extern __inline uint16x8_t
21484 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21485 __arm_vmulhq (uint16x8_t __a
, uint16x8_t __b
)
21487 return __arm_vmulhq_u16 (__a
, __b
);
21490 __extension__
extern __inline
uint32_t
21491 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21492 __arm_vmladavq (uint16x8_t __a
, uint16x8_t __b
)
21494 return __arm_vmladavq_u16 (__a
, __b
);
21497 __extension__
extern __inline
uint16_t
21498 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21499 __arm_vminvq (uint16_t __a
, uint16x8_t __b
)
21501 return __arm_vminvq_u16 (__a
, __b
);
21504 __extension__
extern __inline uint16x8_t
21505 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21506 __arm_vminq (uint16x8_t __a
, uint16x8_t __b
)
21508 return __arm_vminq_u16 (__a
, __b
);
21511 __extension__
extern __inline
uint16_t
21512 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21513 __arm_vmaxvq (uint16_t __a
, uint16x8_t __b
)
21515 return __arm_vmaxvq_u16 (__a
, __b
);
21518 __extension__
extern __inline uint16x8_t
21519 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21520 __arm_vmaxq (uint16x8_t __a
, uint16x8_t __b
)
21522 return __arm_vmaxq_u16 (__a
, __b
);
21525 __extension__
extern __inline uint16x8_t
21526 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21527 __arm_vhsubq (uint16x8_t __a
, uint16x8_t __b
)
21529 return __arm_vhsubq_u16 (__a
, __b
);
21532 __extension__
extern __inline uint16x8_t
21533 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21534 __arm_vhsubq (uint16x8_t __a
, uint16_t __b
)
21536 return __arm_vhsubq_n_u16 (__a
, __b
);
21539 __extension__
extern __inline uint16x8_t
21540 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21541 __arm_vhaddq (uint16x8_t __a
, uint16x8_t __b
)
21543 return __arm_vhaddq_u16 (__a
, __b
);
21546 __extension__
extern __inline uint16x8_t
21547 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21548 __arm_vhaddq (uint16x8_t __a
, uint16_t __b
)
21550 return __arm_vhaddq_n_u16 (__a
, __b
);
21553 __extension__
extern __inline uint16x8_t
21554 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21555 __arm_veorq (uint16x8_t __a
, uint16x8_t __b
)
21557 return __arm_veorq_u16 (__a
, __b
);
21560 __extension__
extern __inline mve_pred16_t
21561 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21562 __arm_vcmpneq (uint16x8_t __a
, uint16_t __b
)
21564 return __arm_vcmpneq_n_u16 (__a
, __b
);
21567 __extension__
extern __inline mve_pred16_t
21568 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21569 __arm_vcmphiq (uint16x8_t __a
, uint16x8_t __b
)
21571 return __arm_vcmphiq_u16 (__a
, __b
);
21574 __extension__
extern __inline mve_pred16_t
21575 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21576 __arm_vcmphiq (uint16x8_t __a
, uint16_t __b
)
21578 return __arm_vcmphiq_n_u16 (__a
, __b
);
21581 __extension__
extern __inline mve_pred16_t
21582 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21583 __arm_vcmpeqq (uint16x8_t __a
, uint16x8_t __b
)
21585 return __arm_vcmpeqq_u16 (__a
, __b
);
21588 __extension__
extern __inline mve_pred16_t
21589 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21590 __arm_vcmpeqq (uint16x8_t __a
, uint16_t __b
)
21592 return __arm_vcmpeqq_n_u16 (__a
, __b
);
21595 __extension__
extern __inline mve_pred16_t
21596 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21597 __arm_vcmpcsq (uint16x8_t __a
, uint16x8_t __b
)
21599 return __arm_vcmpcsq_u16 (__a
, __b
);
21602 __extension__
extern __inline mve_pred16_t
21603 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21604 __arm_vcmpcsq (uint16x8_t __a
, uint16_t __b
)
21606 return __arm_vcmpcsq_n_u16 (__a
, __b
);
21609 __extension__
extern __inline uint16x8_t
21610 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21611 __arm_vcaddq_rot90 (uint16x8_t __a
, uint16x8_t __b
)
21613 return __arm_vcaddq_rot90_u16 (__a
, __b
);
21616 __extension__
extern __inline uint16x8_t
21617 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21618 __arm_vcaddq_rot270 (uint16x8_t __a
, uint16x8_t __b
)
21620 return __arm_vcaddq_rot270_u16 (__a
, __b
);
21623 __extension__
extern __inline uint16x8_t
21624 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21625 __arm_vbicq (uint16x8_t __a
, uint16x8_t __b
)
21627 return __arm_vbicq_u16 (__a
, __b
);
21630 __extension__
extern __inline uint16x8_t
21631 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21632 __arm_vandq (uint16x8_t __a
, uint16x8_t __b
)
21634 return __arm_vandq_u16 (__a
, __b
);
21637 __extension__
extern __inline
uint32_t
21638 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21639 __arm_vaddvq_p (uint16x8_t __a
, mve_pred16_t __p
)
21641 return __arm_vaddvq_p_u16 (__a
, __p
);
21644 __extension__
extern __inline
uint32_t
21645 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21646 __arm_vaddvaq (uint32_t __a
, uint16x8_t __b
)
21648 return __arm_vaddvaq_u16 (__a
, __b
);
21651 __extension__
extern __inline uint16x8_t
21652 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21653 __arm_vaddq (uint16x8_t __a
, uint16_t __b
)
21655 return __arm_vaddq_n_u16 (__a
, __b
);
21658 __extension__
extern __inline uint16x8_t
21659 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21660 __arm_vabdq (uint16x8_t __a
, uint16x8_t __b
)
21662 return __arm_vabdq_u16 (__a
, __b
);
21665 __extension__
extern __inline uint16x8_t
21666 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21667 __arm_vshlq_r (uint16x8_t __a
, int32_t __b
)
21669 return __arm_vshlq_r_u16 (__a
, __b
);
21672 __extension__
extern __inline uint16x8_t
21673 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21674 __arm_vrshlq (uint16x8_t __a
, int16x8_t __b
)
21676 return __arm_vrshlq_u16 (__a
, __b
);
21679 __extension__
extern __inline uint16x8_t
21680 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21681 __arm_vrshlq (uint16x8_t __a
, int32_t __b
)
21683 return __arm_vrshlq_n_u16 (__a
, __b
);
21686 __extension__
extern __inline uint16x8_t
21687 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21688 __arm_vqshlq (uint16x8_t __a
, int16x8_t __b
)
21690 return __arm_vqshlq_u16 (__a
, __b
);
21693 __extension__
extern __inline uint16x8_t
21694 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21695 __arm_vqshlq_r (uint16x8_t __a
, int32_t __b
)
21697 return __arm_vqshlq_r_u16 (__a
, __b
);
21700 __extension__
extern __inline uint16x8_t
21701 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21702 __arm_vqrshlq (uint16x8_t __a
, int16x8_t __b
)
21704 return __arm_vqrshlq_u16 (__a
, __b
);
21707 __extension__
extern __inline uint16x8_t
21708 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21709 __arm_vqrshlq (uint16x8_t __a
, int32_t __b
)
21711 return __arm_vqrshlq_n_u16 (__a
, __b
);
21714 __extension__
extern __inline
uint16_t
21715 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21716 __arm_vminavq (uint16_t __a
, int16x8_t __b
)
21718 return __arm_vminavq_s16 (__a
, __b
);
21721 __extension__
extern __inline uint16x8_t
21722 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21723 __arm_vminaq (uint16x8_t __a
, int16x8_t __b
)
21725 return __arm_vminaq_s16 (__a
, __b
);
21728 __extension__
extern __inline
uint16_t
21729 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21730 __arm_vmaxavq (uint16_t __a
, int16x8_t __b
)
21732 return __arm_vmaxavq_s16 (__a
, __b
);
21735 __extension__
extern __inline uint16x8_t
21736 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21737 __arm_vmaxaq (uint16x8_t __a
, int16x8_t __b
)
21739 return __arm_vmaxaq_s16 (__a
, __b
);
21742 __extension__
extern __inline uint16x8_t
21743 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21744 __arm_vbrsrq (uint16x8_t __a
, int32_t __b
)
21746 return __arm_vbrsrq_n_u16 (__a
, __b
);
21749 __extension__
extern __inline uint16x8_t
21750 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21751 __arm_vshlq_n (uint16x8_t __a
, const int __imm
)
21753 return __arm_vshlq_n_u16 (__a
, __imm
);
21756 __extension__
extern __inline uint16x8_t
21757 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21758 __arm_vrshrq (uint16x8_t __a
, const int __imm
)
21760 return __arm_vrshrq_n_u16 (__a
, __imm
);
21763 __extension__
extern __inline uint16x8_t
21764 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21765 __arm_vqshlq_n (uint16x8_t __a
, const int __imm
)
21767 return __arm_vqshlq_n_u16 (__a
, __imm
);
21770 __extension__
extern __inline mve_pred16_t
21771 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21772 __arm_vcmpneq (int16x8_t __a
, int16_t __b
)
21774 return __arm_vcmpneq_n_s16 (__a
, __b
);
21777 __extension__
extern __inline mve_pred16_t
21778 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21779 __arm_vcmpltq (int16x8_t __a
, int16x8_t __b
)
21781 return __arm_vcmpltq_s16 (__a
, __b
);
21784 __extension__
extern __inline mve_pred16_t
21785 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21786 __arm_vcmpltq (int16x8_t __a
, int16_t __b
)
21788 return __arm_vcmpltq_n_s16 (__a
, __b
);
21791 __extension__
extern __inline mve_pred16_t
21792 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21793 __arm_vcmpleq (int16x8_t __a
, int16x8_t __b
)
21795 return __arm_vcmpleq_s16 (__a
, __b
);
21798 __extension__
extern __inline mve_pred16_t
21799 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21800 __arm_vcmpleq (int16x8_t __a
, int16_t __b
)
21802 return __arm_vcmpleq_n_s16 (__a
, __b
);
21805 __extension__
extern __inline mve_pred16_t
21806 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21807 __arm_vcmpgtq (int16x8_t __a
, int16x8_t __b
)
21809 return __arm_vcmpgtq_s16 (__a
, __b
);
21812 __extension__
extern __inline mve_pred16_t
21813 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21814 __arm_vcmpgtq (int16x8_t __a
, int16_t __b
)
21816 return __arm_vcmpgtq_n_s16 (__a
, __b
);
21819 __extension__
extern __inline mve_pred16_t
21820 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21821 __arm_vcmpgeq (int16x8_t __a
, int16x8_t __b
)
21823 return __arm_vcmpgeq_s16 (__a
, __b
);
21826 __extension__
extern __inline mve_pred16_t
21827 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21828 __arm_vcmpgeq (int16x8_t __a
, int16_t __b
)
21830 return __arm_vcmpgeq_n_s16 (__a
, __b
);
21833 __extension__
extern __inline mve_pred16_t
21834 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21835 __arm_vcmpeqq (int16x8_t __a
, int16x8_t __b
)
21837 return __arm_vcmpeqq_s16 (__a
, __b
);
21840 __extension__
extern __inline mve_pred16_t
21841 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21842 __arm_vcmpeqq (int16x8_t __a
, int16_t __b
)
21844 return __arm_vcmpeqq_n_s16 (__a
, __b
);
21847 __extension__
extern __inline uint16x8_t
21848 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21849 __arm_vqshluq (int16x8_t __a
, const int __imm
)
21851 return __arm_vqshluq_n_s16 (__a
, __imm
);
21854 __extension__
extern __inline
int32_t
21855 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21856 __arm_vaddvq_p (int16x8_t __a
, mve_pred16_t __p
)
21858 return __arm_vaddvq_p_s16 (__a
, __p
);
21861 __extension__
extern __inline int16x8_t
21862 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21863 __arm_vsubq (int16x8_t __a
, int16x8_t __b
)
21865 return __arm_vsubq_s16 (__a
, __b
);
21868 __extension__
extern __inline int16x8_t
21869 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21870 __arm_vsubq (int16x8_t __a
, int16_t __b
)
21872 return __arm_vsubq_n_s16 (__a
, __b
);
21875 __extension__
extern __inline int16x8_t
21876 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21877 __arm_vshlq_r (int16x8_t __a
, int32_t __b
)
21879 return __arm_vshlq_r_s16 (__a
, __b
);
21882 __extension__
extern __inline int16x8_t
21883 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21884 __arm_vrshlq (int16x8_t __a
, int16x8_t __b
)
21886 return __arm_vrshlq_s16 (__a
, __b
);
21889 __extension__
extern __inline int16x8_t
21890 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21891 __arm_vrshlq (int16x8_t __a
, int32_t __b
)
21893 return __arm_vrshlq_n_s16 (__a
, __b
);
21896 __extension__
extern __inline int16x8_t
21897 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21898 __arm_vrmulhq (int16x8_t __a
, int16x8_t __b
)
21900 return __arm_vrmulhq_s16 (__a
, __b
);
21903 __extension__
extern __inline int16x8_t
21904 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21905 __arm_vrhaddq (int16x8_t __a
, int16x8_t __b
)
21907 return __arm_vrhaddq_s16 (__a
, __b
);
21910 __extension__
extern __inline int16x8_t
21911 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21912 __arm_vqsubq (int16x8_t __a
, int16x8_t __b
)
21914 return __arm_vqsubq_s16 (__a
, __b
);
21917 __extension__
extern __inline int16x8_t
21918 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21919 __arm_vqsubq (int16x8_t __a
, int16_t __b
)
21921 return __arm_vqsubq_n_s16 (__a
, __b
);
21924 __extension__
extern __inline int16x8_t
21925 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21926 __arm_vqshlq (int16x8_t __a
, int16x8_t __b
)
21928 return __arm_vqshlq_s16 (__a
, __b
);
21931 __extension__
extern __inline int16x8_t
21932 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21933 __arm_vqshlq_r (int16x8_t __a
, int32_t __b
)
21935 return __arm_vqshlq_r_s16 (__a
, __b
);
21938 __extension__
extern __inline int16x8_t
21939 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21940 __arm_vqrshlq (int16x8_t __a
, int16x8_t __b
)
21942 return __arm_vqrshlq_s16 (__a
, __b
);
21945 __extension__
extern __inline int16x8_t
21946 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21947 __arm_vqrshlq (int16x8_t __a
, int32_t __b
)
21949 return __arm_vqrshlq_n_s16 (__a
, __b
);
21952 __extension__
extern __inline int16x8_t
21953 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21954 __arm_vqrdmulhq (int16x8_t __a
, int16x8_t __b
)
21956 return __arm_vqrdmulhq_s16 (__a
, __b
);
21959 __extension__
extern __inline int16x8_t
21960 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21961 __arm_vqrdmulhq (int16x8_t __a
, int16_t __b
)
21963 return __arm_vqrdmulhq_n_s16 (__a
, __b
);
21966 __extension__
extern __inline int16x8_t
21967 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21968 __arm_vqdmulhq (int16x8_t __a
, int16x8_t __b
)
21970 return __arm_vqdmulhq_s16 (__a
, __b
);
21973 __extension__
extern __inline int16x8_t
21974 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21975 __arm_vqdmulhq (int16x8_t __a
, int16_t __b
)
21977 return __arm_vqdmulhq_n_s16 (__a
, __b
);
21980 __extension__
extern __inline int16x8_t
21981 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21982 __arm_vqaddq (int16x8_t __a
, int16x8_t __b
)
21984 return __arm_vqaddq_s16 (__a
, __b
);
21987 __extension__
extern __inline int16x8_t
21988 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21989 __arm_vqaddq (int16x8_t __a
, int16_t __b
)
21991 return __arm_vqaddq_n_s16 (__a
, __b
);
21994 __extension__
extern __inline int16x8_t
21995 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
21996 __arm_vorrq (int16x8_t __a
, int16x8_t __b
)
21998 return __arm_vorrq_s16 (__a
, __b
);
22001 __extension__
extern __inline int16x8_t
22002 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22003 __arm_vornq (int16x8_t __a
, int16x8_t __b
)
22005 return __arm_vornq_s16 (__a
, __b
);
22008 __extension__
extern __inline int16x8_t
22009 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22010 __arm_vmulq (int16x8_t __a
, int16x8_t __b
)
22012 return __arm_vmulq_s16 (__a
, __b
);
22015 __extension__
extern __inline int16x8_t
22016 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22017 __arm_vmulq (int16x8_t __a
, int16_t __b
)
22019 return __arm_vmulq_n_s16 (__a
, __b
);
22022 __extension__
extern __inline int32x4_t
22023 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22024 __arm_vmulltq_int (int16x8_t __a
, int16x8_t __b
)
22026 return __arm_vmulltq_int_s16 (__a
, __b
);
22029 __extension__
extern __inline int32x4_t
22030 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22031 __arm_vmullbq_int (int16x8_t __a
, int16x8_t __b
)
22033 return __arm_vmullbq_int_s16 (__a
, __b
);
22036 __extension__
extern __inline int16x8_t
22037 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22038 __arm_vmulhq (int16x8_t __a
, int16x8_t __b
)
22040 return __arm_vmulhq_s16 (__a
, __b
);
22043 __extension__
extern __inline
int32_t
22044 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22045 __arm_vmlsdavxq (int16x8_t __a
, int16x8_t __b
)
22047 return __arm_vmlsdavxq_s16 (__a
, __b
);
22050 __extension__
extern __inline
int32_t
22051 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22052 __arm_vmlsdavq (int16x8_t __a
, int16x8_t __b
)
22054 return __arm_vmlsdavq_s16 (__a
, __b
);
22057 __extension__
extern __inline
int32_t
22058 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22059 __arm_vmladavxq (int16x8_t __a
, int16x8_t __b
)
22061 return __arm_vmladavxq_s16 (__a
, __b
);
22064 __extension__
extern __inline
int32_t
22065 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22066 __arm_vmladavq (int16x8_t __a
, int16x8_t __b
)
22068 return __arm_vmladavq_s16 (__a
, __b
);
22071 __extension__
extern __inline
int16_t
22072 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22073 __arm_vminvq (int16_t __a
, int16x8_t __b
)
22075 return __arm_vminvq_s16 (__a
, __b
);
22078 __extension__
extern __inline int16x8_t
22079 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22080 __arm_vminq (int16x8_t __a
, int16x8_t __b
)
22082 return __arm_vminq_s16 (__a
, __b
);
22085 __extension__
extern __inline
int16_t
22086 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22087 __arm_vmaxvq (int16_t __a
, int16x8_t __b
)
22089 return __arm_vmaxvq_s16 (__a
, __b
);
22092 __extension__
extern __inline int16x8_t
22093 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22094 __arm_vmaxq (int16x8_t __a
, int16x8_t __b
)
22096 return __arm_vmaxq_s16 (__a
, __b
);
22099 __extension__
extern __inline int16x8_t
22100 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22101 __arm_vhsubq (int16x8_t __a
, int16x8_t __b
)
22103 return __arm_vhsubq_s16 (__a
, __b
);
22106 __extension__
extern __inline int16x8_t
22107 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22108 __arm_vhsubq (int16x8_t __a
, int16_t __b
)
22110 return __arm_vhsubq_n_s16 (__a
, __b
);
22113 __extension__
extern __inline int16x8_t
22114 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22115 __arm_vhcaddq_rot90 (int16x8_t __a
, int16x8_t __b
)
22117 return __arm_vhcaddq_rot90_s16 (__a
, __b
);
22120 __extension__
extern __inline int16x8_t
22121 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22122 __arm_vhcaddq_rot270 (int16x8_t __a
, int16x8_t __b
)
22124 return __arm_vhcaddq_rot270_s16 (__a
, __b
);
22127 __extension__
extern __inline int16x8_t
22128 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22129 __arm_vhaddq (int16x8_t __a
, int16x8_t __b
)
22131 return __arm_vhaddq_s16 (__a
, __b
);
22134 __extension__
extern __inline int16x8_t
22135 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22136 __arm_vhaddq (int16x8_t __a
, int16_t __b
)
22138 return __arm_vhaddq_n_s16 (__a
, __b
);
22141 __extension__
extern __inline int16x8_t
22142 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22143 __arm_veorq (int16x8_t __a
, int16x8_t __b
)
22145 return __arm_veorq_s16 (__a
, __b
);
22148 __extension__
extern __inline int16x8_t
22149 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22150 __arm_vcaddq_rot90 (int16x8_t __a
, int16x8_t __b
)
22152 return __arm_vcaddq_rot90_s16 (__a
, __b
);
22155 __extension__
extern __inline int16x8_t
22156 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22157 __arm_vcaddq_rot270 (int16x8_t __a
, int16x8_t __b
)
22159 return __arm_vcaddq_rot270_s16 (__a
, __b
);
22162 __extension__
extern __inline int16x8_t
22163 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22164 __arm_vbrsrq (int16x8_t __a
, int32_t __b
)
22166 return __arm_vbrsrq_n_s16 (__a
, __b
);
22169 __extension__
extern __inline int16x8_t
22170 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22171 __arm_vbicq (int16x8_t __a
, int16x8_t __b
)
22173 return __arm_vbicq_s16 (__a
, __b
);
22176 __extension__
extern __inline int16x8_t
22177 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22178 __arm_vandq (int16x8_t __a
, int16x8_t __b
)
22180 return __arm_vandq_s16 (__a
, __b
);
22183 __extension__
extern __inline
int32_t
22184 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22185 __arm_vaddvaq (int32_t __a
, int16x8_t __b
)
22187 return __arm_vaddvaq_s16 (__a
, __b
);
22190 __extension__
extern __inline int16x8_t
22191 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22192 __arm_vaddq (int16x8_t __a
, int16_t __b
)
22194 return __arm_vaddq_n_s16 (__a
, __b
);
22197 __extension__
extern __inline int16x8_t
22198 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22199 __arm_vabdq (int16x8_t __a
, int16x8_t __b
)
22201 return __arm_vabdq_s16 (__a
, __b
);
22204 __extension__
extern __inline int16x8_t
22205 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22206 __arm_vshlq_n (int16x8_t __a
, const int __imm
)
22208 return __arm_vshlq_n_s16 (__a
, __imm
);
22211 __extension__
extern __inline int16x8_t
22212 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22213 __arm_vrshrq (int16x8_t __a
, const int __imm
)
22215 return __arm_vrshrq_n_s16 (__a
, __imm
);
22218 __extension__
extern __inline int16x8_t
22219 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22220 __arm_vqshlq_n (int16x8_t __a
, const int __imm
)
22222 return __arm_vqshlq_n_s16 (__a
, __imm
);
22225 __extension__
extern __inline uint32x4_t
22226 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22227 __arm_vsubq (uint32x4_t __a
, uint32x4_t __b
)
22229 return __arm_vsubq_u32 (__a
, __b
);
22232 __extension__
extern __inline uint32x4_t
22233 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22234 __arm_vsubq (uint32x4_t __a
, uint32_t __b
)
22236 return __arm_vsubq_n_u32 (__a
, __b
);
22239 __extension__
extern __inline uint32x4_t
22240 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22241 __arm_vrmulhq (uint32x4_t __a
, uint32x4_t __b
)
22243 return __arm_vrmulhq_u32 (__a
, __b
);
22246 __extension__
extern __inline uint32x4_t
22247 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22248 __arm_vrhaddq (uint32x4_t __a
, uint32x4_t __b
)
22250 return __arm_vrhaddq_u32 (__a
, __b
);
22253 __extension__
extern __inline uint32x4_t
22254 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22255 __arm_vqsubq (uint32x4_t __a
, uint32x4_t __b
)
22257 return __arm_vqsubq_u32 (__a
, __b
);
22260 __extension__
extern __inline uint32x4_t
22261 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22262 __arm_vqsubq (uint32x4_t __a
, uint32_t __b
)
22264 return __arm_vqsubq_n_u32 (__a
, __b
);
22267 __extension__
extern __inline uint32x4_t
22268 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22269 __arm_vqaddq (uint32x4_t __a
, uint32x4_t __b
)
22271 return __arm_vqaddq_u32 (__a
, __b
);
22274 __extension__
extern __inline uint32x4_t
22275 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22276 __arm_vqaddq (uint32x4_t __a
, uint32_t __b
)
22278 return __arm_vqaddq_n_u32 (__a
, __b
);
22281 __extension__
extern __inline uint32x4_t
22282 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22283 __arm_vorrq (uint32x4_t __a
, uint32x4_t __b
)
22285 return __arm_vorrq_u32 (__a
, __b
);
22288 __extension__
extern __inline uint32x4_t
22289 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22290 __arm_vornq (uint32x4_t __a
, uint32x4_t __b
)
22292 return __arm_vornq_u32 (__a
, __b
);
22295 __extension__
extern __inline uint32x4_t
22296 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22297 __arm_vmulq (uint32x4_t __a
, uint32x4_t __b
)
22299 return __arm_vmulq_u32 (__a
, __b
);
22302 __extension__
extern __inline uint32x4_t
22303 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22304 __arm_vmulq (uint32x4_t __a
, uint32_t __b
)
22306 return __arm_vmulq_n_u32 (__a
, __b
);
22309 __extension__
extern __inline uint64x2_t
22310 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22311 __arm_vmulltq_int (uint32x4_t __a
, uint32x4_t __b
)
22313 return __arm_vmulltq_int_u32 (__a
, __b
);
22316 __extension__
extern __inline uint64x2_t
22317 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22318 __arm_vmullbq_int (uint32x4_t __a
, uint32x4_t __b
)
22320 return __arm_vmullbq_int_u32 (__a
, __b
);
22323 __extension__
extern __inline uint32x4_t
22324 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22325 __arm_vmulhq (uint32x4_t __a
, uint32x4_t __b
)
22327 return __arm_vmulhq_u32 (__a
, __b
);
22330 __extension__
extern __inline
uint32_t
22331 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22332 __arm_vmladavq (uint32x4_t __a
, uint32x4_t __b
)
22334 return __arm_vmladavq_u32 (__a
, __b
);
22337 __extension__
extern __inline
uint32_t
22338 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22339 __arm_vminvq (uint32_t __a
, uint32x4_t __b
)
22341 return __arm_vminvq_u32 (__a
, __b
);
22344 __extension__
extern __inline uint32x4_t
22345 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22346 __arm_vminq (uint32x4_t __a
, uint32x4_t __b
)
22348 return __arm_vminq_u32 (__a
, __b
);
22351 __extension__
extern __inline
uint32_t
22352 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22353 __arm_vmaxvq (uint32_t __a
, uint32x4_t __b
)
22355 return __arm_vmaxvq_u32 (__a
, __b
);
22358 __extension__
extern __inline uint32x4_t
22359 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22360 __arm_vmaxq (uint32x4_t __a
, uint32x4_t __b
)
22362 return __arm_vmaxq_u32 (__a
, __b
);
22365 __extension__
extern __inline uint32x4_t
22366 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22367 __arm_vhsubq (uint32x4_t __a
, uint32x4_t __b
)
22369 return __arm_vhsubq_u32 (__a
, __b
);
22372 __extension__
extern __inline uint32x4_t
22373 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22374 __arm_vhsubq (uint32x4_t __a
, uint32_t __b
)
22376 return __arm_vhsubq_n_u32 (__a
, __b
);
22379 __extension__
extern __inline uint32x4_t
22380 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22381 __arm_vhaddq (uint32x4_t __a
, uint32x4_t __b
)
22383 return __arm_vhaddq_u32 (__a
, __b
);
22386 __extension__
extern __inline uint32x4_t
22387 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22388 __arm_vhaddq (uint32x4_t __a
, uint32_t __b
)
22390 return __arm_vhaddq_n_u32 (__a
, __b
);
22393 __extension__
extern __inline uint32x4_t
22394 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22395 __arm_veorq (uint32x4_t __a
, uint32x4_t __b
)
22397 return __arm_veorq_u32 (__a
, __b
);
22400 __extension__
extern __inline mve_pred16_t
22401 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22402 __arm_vcmpneq (uint32x4_t __a
, uint32_t __b
)
22404 return __arm_vcmpneq_n_u32 (__a
, __b
);
22407 __extension__
extern __inline mve_pred16_t
22408 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22409 __arm_vcmphiq (uint32x4_t __a
, uint32x4_t __b
)
22411 return __arm_vcmphiq_u32 (__a
, __b
);
22414 __extension__
extern __inline mve_pred16_t
22415 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22416 __arm_vcmphiq (uint32x4_t __a
, uint32_t __b
)
22418 return __arm_vcmphiq_n_u32 (__a
, __b
);
22421 __extension__
extern __inline mve_pred16_t
22422 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22423 __arm_vcmpeqq (uint32x4_t __a
, uint32x4_t __b
)
22425 return __arm_vcmpeqq_u32 (__a
, __b
);
22428 __extension__
extern __inline mve_pred16_t
22429 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22430 __arm_vcmpeqq (uint32x4_t __a
, uint32_t __b
)
22432 return __arm_vcmpeqq_n_u32 (__a
, __b
);
22435 __extension__
extern __inline mve_pred16_t
22436 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22437 __arm_vcmpcsq (uint32x4_t __a
, uint32x4_t __b
)
22439 return __arm_vcmpcsq_u32 (__a
, __b
);
22442 __extension__
extern __inline mve_pred16_t
22443 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22444 __arm_vcmpcsq (uint32x4_t __a
, uint32_t __b
)
22446 return __arm_vcmpcsq_n_u32 (__a
, __b
);
22449 __extension__
extern __inline uint32x4_t
22450 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22451 __arm_vcaddq_rot90 (uint32x4_t __a
, uint32x4_t __b
)
22453 return __arm_vcaddq_rot90_u32 (__a
, __b
);
22456 __extension__
extern __inline uint32x4_t
22457 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22458 __arm_vcaddq_rot270 (uint32x4_t __a
, uint32x4_t __b
)
22460 return __arm_vcaddq_rot270_u32 (__a
, __b
);
22463 __extension__
extern __inline uint32x4_t
22464 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22465 __arm_vbicq (uint32x4_t __a
, uint32x4_t __b
)
22467 return __arm_vbicq_u32 (__a
, __b
);
22470 __extension__
extern __inline uint32x4_t
22471 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22472 __arm_vandq (uint32x4_t __a
, uint32x4_t __b
)
22474 return __arm_vandq_u32 (__a
, __b
);
22477 __extension__
extern __inline
uint32_t
22478 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22479 __arm_vaddvq_p (uint32x4_t __a
, mve_pred16_t __p
)
22481 return __arm_vaddvq_p_u32 (__a
, __p
);
22484 __extension__
extern __inline
uint32_t
22485 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22486 __arm_vaddvaq (uint32_t __a
, uint32x4_t __b
)
22488 return __arm_vaddvaq_u32 (__a
, __b
);
22491 __extension__
extern __inline uint32x4_t
22492 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22493 __arm_vaddq (uint32x4_t __a
, uint32_t __b
)
22495 return __arm_vaddq_n_u32 (__a
, __b
);
22498 __extension__
extern __inline uint32x4_t
22499 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22500 __arm_vabdq (uint32x4_t __a
, uint32x4_t __b
)
22502 return __arm_vabdq_u32 (__a
, __b
);
22505 __extension__
extern __inline uint32x4_t
22506 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22507 __arm_vshlq_r (uint32x4_t __a
, int32_t __b
)
22509 return __arm_vshlq_r_u32 (__a
, __b
);
22512 __extension__
extern __inline uint32x4_t
22513 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22514 __arm_vrshlq (uint32x4_t __a
, int32x4_t __b
)
22516 return __arm_vrshlq_u32 (__a
, __b
);
22519 __extension__
extern __inline uint32x4_t
22520 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22521 __arm_vrshlq (uint32x4_t __a
, int32_t __b
)
22523 return __arm_vrshlq_n_u32 (__a
, __b
);
22526 __extension__
extern __inline uint32x4_t
22527 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22528 __arm_vqshlq (uint32x4_t __a
, int32x4_t __b
)
22530 return __arm_vqshlq_u32 (__a
, __b
);
22533 __extension__
extern __inline uint32x4_t
22534 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22535 __arm_vqshlq_r (uint32x4_t __a
, int32_t __b
)
22537 return __arm_vqshlq_r_u32 (__a
, __b
);
22540 __extension__
extern __inline uint32x4_t
22541 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22542 __arm_vqrshlq (uint32x4_t __a
, int32x4_t __b
)
22544 return __arm_vqrshlq_u32 (__a
, __b
);
22547 __extension__
extern __inline uint32x4_t
22548 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22549 __arm_vqrshlq (uint32x4_t __a
, int32_t __b
)
22551 return __arm_vqrshlq_n_u32 (__a
, __b
);
22554 __extension__
extern __inline
uint32_t
22555 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22556 __arm_vminavq (uint32_t __a
, int32x4_t __b
)
22558 return __arm_vminavq_s32 (__a
, __b
);
22561 __extension__
extern __inline uint32x4_t
22562 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22563 __arm_vminaq (uint32x4_t __a
, int32x4_t __b
)
22565 return __arm_vminaq_s32 (__a
, __b
);
22568 __extension__
extern __inline
uint32_t
22569 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22570 __arm_vmaxavq (uint32_t __a
, int32x4_t __b
)
22572 return __arm_vmaxavq_s32 (__a
, __b
);
22575 __extension__
extern __inline uint32x4_t
22576 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22577 __arm_vmaxaq (uint32x4_t __a
, int32x4_t __b
)
22579 return __arm_vmaxaq_s32 (__a
, __b
);
22582 __extension__
extern __inline uint32x4_t
22583 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22584 __arm_vbrsrq (uint32x4_t __a
, int32_t __b
)
22586 return __arm_vbrsrq_n_u32 (__a
, __b
);
22589 __extension__
extern __inline uint32x4_t
22590 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22591 __arm_vshlq_n (uint32x4_t __a
, const int __imm
)
22593 return __arm_vshlq_n_u32 (__a
, __imm
);
22596 __extension__
extern __inline uint32x4_t
22597 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22598 __arm_vrshrq (uint32x4_t __a
, const int __imm
)
22600 return __arm_vrshrq_n_u32 (__a
, __imm
);
22603 __extension__
extern __inline uint32x4_t
22604 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22605 __arm_vqshlq_n (uint32x4_t __a
, const int __imm
)
22607 return __arm_vqshlq_n_u32 (__a
, __imm
);
22610 __extension__
extern __inline mve_pred16_t
22611 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22612 __arm_vcmpneq (int32x4_t __a
, int32_t __b
)
22614 return __arm_vcmpneq_n_s32 (__a
, __b
);
22617 __extension__
extern __inline mve_pred16_t
22618 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22619 __arm_vcmpltq (int32x4_t __a
, int32x4_t __b
)
22621 return __arm_vcmpltq_s32 (__a
, __b
);
22624 __extension__
extern __inline mve_pred16_t
22625 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22626 __arm_vcmpltq (int32x4_t __a
, int32_t __b
)
22628 return __arm_vcmpltq_n_s32 (__a
, __b
);
22631 __extension__
extern __inline mve_pred16_t
22632 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22633 __arm_vcmpleq (int32x4_t __a
, int32x4_t __b
)
22635 return __arm_vcmpleq_s32 (__a
, __b
);
22638 __extension__
extern __inline mve_pred16_t
22639 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22640 __arm_vcmpleq (int32x4_t __a
, int32_t __b
)
22642 return __arm_vcmpleq_n_s32 (__a
, __b
);
22645 __extension__
extern __inline mve_pred16_t
22646 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22647 __arm_vcmpgtq (int32x4_t __a
, int32x4_t __b
)
22649 return __arm_vcmpgtq_s32 (__a
, __b
);
22652 __extension__
extern __inline mve_pred16_t
22653 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22654 __arm_vcmpgtq (int32x4_t __a
, int32_t __b
)
22656 return __arm_vcmpgtq_n_s32 (__a
, __b
);
22659 __extension__
extern __inline mve_pred16_t
22660 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22661 __arm_vcmpgeq (int32x4_t __a
, int32x4_t __b
)
22663 return __arm_vcmpgeq_s32 (__a
, __b
);
22666 __extension__
extern __inline mve_pred16_t
22667 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22668 __arm_vcmpgeq (int32x4_t __a
, int32_t __b
)
22670 return __arm_vcmpgeq_n_s32 (__a
, __b
);
22673 __extension__
extern __inline mve_pred16_t
22674 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22675 __arm_vcmpeqq (int32x4_t __a
, int32x4_t __b
)
22677 return __arm_vcmpeqq_s32 (__a
, __b
);
22680 __extension__
extern __inline mve_pred16_t
22681 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22682 __arm_vcmpeqq (int32x4_t __a
, int32_t __b
)
22684 return __arm_vcmpeqq_n_s32 (__a
, __b
);
22687 __extension__
extern __inline uint32x4_t
22688 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22689 __arm_vqshluq (int32x4_t __a
, const int __imm
)
22691 return __arm_vqshluq_n_s32 (__a
, __imm
);
22694 __extension__
extern __inline
int32_t
22695 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22696 __arm_vaddvq_p (int32x4_t __a
, mve_pred16_t __p
)
22698 return __arm_vaddvq_p_s32 (__a
, __p
);
22701 __extension__
extern __inline int32x4_t
22702 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22703 __arm_vsubq (int32x4_t __a
, int32x4_t __b
)
22705 return __arm_vsubq_s32 (__a
, __b
);
22708 __extension__
extern __inline int32x4_t
22709 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22710 __arm_vsubq (int32x4_t __a
, int32_t __b
)
22712 return __arm_vsubq_n_s32 (__a
, __b
);
22715 __extension__
extern __inline int32x4_t
22716 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22717 __arm_vshlq_r (int32x4_t __a
, int32_t __b
)
22719 return __arm_vshlq_r_s32 (__a
, __b
);
22722 __extension__
extern __inline int32x4_t
22723 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22724 __arm_vrshlq (int32x4_t __a
, int32x4_t __b
)
22726 return __arm_vrshlq_s32 (__a
, __b
);
22729 __extension__
extern __inline int32x4_t
22730 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22731 __arm_vrshlq (int32x4_t __a
, int32_t __b
)
22733 return __arm_vrshlq_n_s32 (__a
, __b
);
22736 __extension__
extern __inline int32x4_t
22737 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22738 __arm_vrmulhq (int32x4_t __a
, int32x4_t __b
)
22740 return __arm_vrmulhq_s32 (__a
, __b
);
22743 __extension__
extern __inline int32x4_t
22744 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22745 __arm_vrhaddq (int32x4_t __a
, int32x4_t __b
)
22747 return __arm_vrhaddq_s32 (__a
, __b
);
22750 __extension__
extern __inline int32x4_t
22751 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22752 __arm_vqsubq (int32x4_t __a
, int32x4_t __b
)
22754 return __arm_vqsubq_s32 (__a
, __b
);
22757 __extension__
extern __inline int32x4_t
22758 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22759 __arm_vqsubq (int32x4_t __a
, int32_t __b
)
22761 return __arm_vqsubq_n_s32 (__a
, __b
);
22764 __extension__
extern __inline int32x4_t
22765 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22766 __arm_vqshlq (int32x4_t __a
, int32x4_t __b
)
22768 return __arm_vqshlq_s32 (__a
, __b
);
22771 __extension__
extern __inline int32x4_t
22772 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22773 __arm_vqshlq_r (int32x4_t __a
, int32_t __b
)
22775 return __arm_vqshlq_r_s32 (__a
, __b
);
22778 __extension__
extern __inline int32x4_t
22779 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22780 __arm_vqrshlq (int32x4_t __a
, int32x4_t __b
)
22782 return __arm_vqrshlq_s32 (__a
, __b
);
22785 __extension__
extern __inline int32x4_t
22786 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22787 __arm_vqrshlq (int32x4_t __a
, int32_t __b
)
22789 return __arm_vqrshlq_n_s32 (__a
, __b
);
22792 __extension__
extern __inline int32x4_t
22793 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22794 __arm_vqrdmulhq (int32x4_t __a
, int32x4_t __b
)
22796 return __arm_vqrdmulhq_s32 (__a
, __b
);
22799 __extension__
extern __inline int32x4_t
22800 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22801 __arm_vqrdmulhq (int32x4_t __a
, int32_t __b
)
22803 return __arm_vqrdmulhq_n_s32 (__a
, __b
);
22806 __extension__
extern __inline int32x4_t
22807 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22808 __arm_vqdmulhq (int32x4_t __a
, int32x4_t __b
)
22810 return __arm_vqdmulhq_s32 (__a
, __b
);
22813 __extension__
extern __inline int32x4_t
22814 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22815 __arm_vqdmulhq (int32x4_t __a
, int32_t __b
)
22817 return __arm_vqdmulhq_n_s32 (__a
, __b
);
22820 __extension__
extern __inline int32x4_t
22821 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22822 __arm_vqaddq (int32x4_t __a
, int32x4_t __b
)
22824 return __arm_vqaddq_s32 (__a
, __b
);
22827 __extension__
extern __inline int32x4_t
22828 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22829 __arm_vqaddq (int32x4_t __a
, int32_t __b
)
22831 return __arm_vqaddq_n_s32 (__a
, __b
);
22834 __extension__
extern __inline int32x4_t
22835 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22836 __arm_vorrq (int32x4_t __a
, int32x4_t __b
)
22838 return __arm_vorrq_s32 (__a
, __b
);
22841 __extension__
extern __inline int32x4_t
22842 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22843 __arm_vornq (int32x4_t __a
, int32x4_t __b
)
22845 return __arm_vornq_s32 (__a
, __b
);
22848 __extension__
extern __inline int32x4_t
22849 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22850 __arm_vmulq (int32x4_t __a
, int32x4_t __b
)
22852 return __arm_vmulq_s32 (__a
, __b
);
22855 __extension__
extern __inline int32x4_t
22856 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22857 __arm_vmulq (int32x4_t __a
, int32_t __b
)
22859 return __arm_vmulq_n_s32 (__a
, __b
);
22862 __extension__
extern __inline int64x2_t
22863 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22864 __arm_vmulltq_int (int32x4_t __a
, int32x4_t __b
)
22866 return __arm_vmulltq_int_s32 (__a
, __b
);
22869 __extension__
extern __inline int64x2_t
22870 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22871 __arm_vmullbq_int (int32x4_t __a
, int32x4_t __b
)
22873 return __arm_vmullbq_int_s32 (__a
, __b
);
22876 __extension__
extern __inline int32x4_t
22877 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22878 __arm_vmulhq (int32x4_t __a
, int32x4_t __b
)
22880 return __arm_vmulhq_s32 (__a
, __b
);
22883 __extension__
extern __inline
int32_t
22884 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22885 __arm_vmlsdavxq (int32x4_t __a
, int32x4_t __b
)
22887 return __arm_vmlsdavxq_s32 (__a
, __b
);
22890 __extension__
extern __inline
int32_t
22891 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22892 __arm_vmlsdavq (int32x4_t __a
, int32x4_t __b
)
22894 return __arm_vmlsdavq_s32 (__a
, __b
);
22897 __extension__
extern __inline
int32_t
22898 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22899 __arm_vmladavxq (int32x4_t __a
, int32x4_t __b
)
22901 return __arm_vmladavxq_s32 (__a
, __b
);
22904 __extension__
extern __inline
int32_t
22905 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22906 __arm_vmladavq (int32x4_t __a
, int32x4_t __b
)
22908 return __arm_vmladavq_s32 (__a
, __b
);
22911 __extension__
extern __inline
int32_t
22912 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22913 __arm_vminvq (int32_t __a
, int32x4_t __b
)
22915 return __arm_vminvq_s32 (__a
, __b
);
22918 __extension__
extern __inline int32x4_t
22919 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22920 __arm_vminq (int32x4_t __a
, int32x4_t __b
)
22922 return __arm_vminq_s32 (__a
, __b
);
22925 __extension__
extern __inline
int32_t
22926 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22927 __arm_vmaxvq (int32_t __a
, int32x4_t __b
)
22929 return __arm_vmaxvq_s32 (__a
, __b
);
22932 __extension__
extern __inline int32x4_t
22933 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22934 __arm_vmaxq (int32x4_t __a
, int32x4_t __b
)
22936 return __arm_vmaxq_s32 (__a
, __b
);
22939 __extension__
extern __inline int32x4_t
22940 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22941 __arm_vhsubq (int32x4_t __a
, int32x4_t __b
)
22943 return __arm_vhsubq_s32 (__a
, __b
);
22946 __extension__
extern __inline int32x4_t
22947 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22948 __arm_vhsubq (int32x4_t __a
, int32_t __b
)
22950 return __arm_vhsubq_n_s32 (__a
, __b
);
22953 __extension__
extern __inline int32x4_t
22954 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22955 __arm_vhcaddq_rot90 (int32x4_t __a
, int32x4_t __b
)
22957 return __arm_vhcaddq_rot90_s32 (__a
, __b
);
22960 __extension__
extern __inline int32x4_t
22961 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22962 __arm_vhcaddq_rot270 (int32x4_t __a
, int32x4_t __b
)
22964 return __arm_vhcaddq_rot270_s32 (__a
, __b
);
22967 __extension__
extern __inline int32x4_t
22968 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22969 __arm_vhaddq (int32x4_t __a
, int32x4_t __b
)
22971 return __arm_vhaddq_s32 (__a
, __b
);
22974 __extension__
extern __inline int32x4_t
22975 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22976 __arm_vhaddq (int32x4_t __a
, int32_t __b
)
22978 return __arm_vhaddq_n_s32 (__a
, __b
);
22981 __extension__
extern __inline int32x4_t
22982 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22983 __arm_veorq (int32x4_t __a
, int32x4_t __b
)
22985 return __arm_veorq_s32 (__a
, __b
);
22988 __extension__
extern __inline int32x4_t
22989 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22990 __arm_vcaddq_rot90 (int32x4_t __a
, int32x4_t __b
)
22992 return __arm_vcaddq_rot90_s32 (__a
, __b
);
22995 __extension__
extern __inline int32x4_t
22996 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
22997 __arm_vcaddq_rot270 (int32x4_t __a
, int32x4_t __b
)
22999 return __arm_vcaddq_rot270_s32 (__a
, __b
);
23002 __extension__
extern __inline int32x4_t
23003 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23004 __arm_vbrsrq (int32x4_t __a
, int32_t __b
)
23006 return __arm_vbrsrq_n_s32 (__a
, __b
);
23009 __extension__
extern __inline int32x4_t
23010 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23011 __arm_vbicq (int32x4_t __a
, int32x4_t __b
)
23013 return __arm_vbicq_s32 (__a
, __b
);
23016 __extension__
extern __inline int32x4_t
23017 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23018 __arm_vandq (int32x4_t __a
, int32x4_t __b
)
23020 return __arm_vandq_s32 (__a
, __b
);
23023 __extension__
extern __inline
int32_t
23024 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23025 __arm_vaddvaq (int32_t __a
, int32x4_t __b
)
23027 return __arm_vaddvaq_s32 (__a
, __b
);
23030 __extension__
extern __inline int32x4_t
23031 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23032 __arm_vaddq (int32x4_t __a
, int32_t __b
)
23034 return __arm_vaddq_n_s32 (__a
, __b
);
23037 __extension__
extern __inline int32x4_t
23038 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23039 __arm_vabdq (int32x4_t __a
, int32x4_t __b
)
23041 return __arm_vabdq_s32 (__a
, __b
);
23044 __extension__
extern __inline int32x4_t
23045 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23046 __arm_vshlq_n (int32x4_t __a
, const int __imm
)
23048 return __arm_vshlq_n_s32 (__a
, __imm
);
23051 __extension__
extern __inline int32x4_t
23052 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23053 __arm_vrshrq (int32x4_t __a
, const int __imm
)
23055 return __arm_vrshrq_n_s32 (__a
, __imm
);
23058 __extension__
extern __inline int32x4_t
23059 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23060 __arm_vqshlq_n (int32x4_t __a
, const int __imm
)
23062 return __arm_vqshlq_n_s32 (__a
, __imm
);
23065 __extension__
extern __inline uint8x16_t
23066 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23067 __arm_vqmovntq (uint8x16_t __a
, uint16x8_t __b
)
23069 return __arm_vqmovntq_u16 (__a
, __b
);
23072 __extension__
extern __inline uint8x16_t
23073 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23074 __arm_vqmovnbq (uint8x16_t __a
, uint16x8_t __b
)
23076 return __arm_vqmovnbq_u16 (__a
, __b
);
23079 __extension__
extern __inline uint16x8_t
23080 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23081 __arm_vmulltq_poly (uint8x16_t __a
, uint8x16_t __b
)
23083 return __arm_vmulltq_poly_p8 (__a
, __b
);
23086 __extension__
extern __inline uint16x8_t
23087 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23088 __arm_vmullbq_poly (uint8x16_t __a
, uint8x16_t __b
)
23090 return __arm_vmullbq_poly_p8 (__a
, __b
);
23093 __extension__
extern __inline uint8x16_t
23094 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23095 __arm_vmovntq (uint8x16_t __a
, uint16x8_t __b
)
23097 return __arm_vmovntq_u16 (__a
, __b
);
23100 __extension__
extern __inline uint8x16_t
23101 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23102 __arm_vmovnbq (uint8x16_t __a
, uint16x8_t __b
)
23104 return __arm_vmovnbq_u16 (__a
, __b
);
23107 __extension__
extern __inline
uint64_t
23108 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23109 __arm_vmlaldavq (uint16x8_t __a
, uint16x8_t __b
)
23111 return __arm_vmlaldavq_u16 (__a
, __b
);
23114 __extension__
extern __inline uint8x16_t
23115 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23116 __arm_vqmovuntq (uint8x16_t __a
, int16x8_t __b
)
23118 return __arm_vqmovuntq_s16 (__a
, __b
);
23121 __extension__
extern __inline uint8x16_t
23122 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23123 __arm_vqmovunbq (uint8x16_t __a
, int16x8_t __b
)
23125 return __arm_vqmovunbq_s16 (__a
, __b
);
23128 __extension__
extern __inline uint16x8_t
23129 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23130 __arm_vshlltq (uint8x16_t __a
, const int __imm
)
23132 return __arm_vshlltq_n_u8 (__a
, __imm
);
23135 __extension__
extern __inline uint16x8_t
23136 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23137 __arm_vshllbq (uint8x16_t __a
, const int __imm
)
23139 return __arm_vshllbq_n_u8 (__a
, __imm
);
23142 __extension__
extern __inline uint16x8_t
23143 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23144 __arm_vorrq (uint16x8_t __a
, const int __imm
)
23146 return __arm_vorrq_n_u16 (__a
, __imm
);
23149 __extension__
extern __inline uint16x8_t
23150 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23151 __arm_vbicq (uint16x8_t __a
, const int __imm
)
23153 return __arm_vbicq_n_u16 (__a
, __imm
);
23156 __extension__
extern __inline int8x16_t
23157 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23158 __arm_vqmovntq (int8x16_t __a
, int16x8_t __b
)
23160 return __arm_vqmovntq_s16 (__a
, __b
);
23163 __extension__
extern __inline int8x16_t
23164 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23165 __arm_vqmovnbq (int8x16_t __a
, int16x8_t __b
)
23167 return __arm_vqmovnbq_s16 (__a
, __b
);
23170 __extension__
extern __inline int32x4_t
23171 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23172 __arm_vqdmulltq (int16x8_t __a
, int16x8_t __b
)
23174 return __arm_vqdmulltq_s16 (__a
, __b
);
23177 __extension__
extern __inline int32x4_t
23178 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23179 __arm_vqdmulltq (int16x8_t __a
, int16_t __b
)
23181 return __arm_vqdmulltq_n_s16 (__a
, __b
);
23184 __extension__
extern __inline int32x4_t
23185 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23186 __arm_vqdmullbq (int16x8_t __a
, int16x8_t __b
)
23188 return __arm_vqdmullbq_s16 (__a
, __b
);
23191 __extension__
extern __inline int32x4_t
23192 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23193 __arm_vqdmullbq (int16x8_t __a
, int16_t __b
)
23195 return __arm_vqdmullbq_n_s16 (__a
, __b
);
23198 __extension__
extern __inline int8x16_t
23199 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23200 __arm_vmovntq (int8x16_t __a
, int16x8_t __b
)
23202 return __arm_vmovntq_s16 (__a
, __b
);
23205 __extension__
extern __inline int8x16_t
23206 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23207 __arm_vmovnbq (int8x16_t __a
, int16x8_t __b
)
23209 return __arm_vmovnbq_s16 (__a
, __b
);
23212 __extension__
extern __inline
int64_t
23213 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23214 __arm_vmlsldavxq (int16x8_t __a
, int16x8_t __b
)
23216 return __arm_vmlsldavxq_s16 (__a
, __b
);
23219 __extension__
extern __inline
int64_t
23220 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23221 __arm_vmlsldavq (int16x8_t __a
, int16x8_t __b
)
23223 return __arm_vmlsldavq_s16 (__a
, __b
);
23226 __extension__
extern __inline
int64_t
23227 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23228 __arm_vmlaldavxq (int16x8_t __a
, int16x8_t __b
)
23230 return __arm_vmlaldavxq_s16 (__a
, __b
);
23233 __extension__
extern __inline
int64_t
23234 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
23235 __arm_vmlaldavq (int16x8_t __a
, int16x8_t __b
)
23237 return __arm_vmlaldavq_s16 (__a
, __b
);
/* Widening shifts of s8 lanes and immediate-form orr/bic on s16 vectors.  */

/* Shift-left long of the top s8 lanes by an immediate, widening to s16.  */
__extension__ extern __inline int16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshlltq (int8x16_t __a, const int __imm)
{
  return __arm_vshlltq_n_s8 (__a, __imm);
}

/* Shift-left long of the bottom s8 lanes by an immediate, widening to s16.  */
__extension__ extern __inline int16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshllbq (int8x16_t __a, const int __imm)
{
  return __arm_vshllbq_n_s8 (__a, __imm);
}

/* Bitwise OR of each s16 lane with an immediate.  */
__extension__ extern __inline int16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vorrq (int16x8_t __a, const int __imm)
{
  return __arm_vorrq_n_s16 (__a, __imm);
}

/* Bitwise clear (AND NOT) of each s16 lane with an immediate.  */
__extension__ extern __inline int16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vbicq (int16x8_t __a, const int __imm)
{
  return __arm_vbicq_n_s16 (__a, __imm);
}
/* Unsigned 32-bit narrowing, 16-bit polynomial multiplies, and unsigned
   widening-shift / immediate orr/bic wrappers.  */

/* Saturating narrow of u32 lanes into the top halves of a u16 vector.  */
__extension__ extern __inline uint16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqmovntq (uint16x8_t __a, uint32x4_t __b)
{
  return __arm_vqmovntq_u32 (__a, __b);
}

/* Saturating narrow of u32 lanes into the bottom halves of a u16 vector.  */
__extension__ extern __inline uint16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqmovnbq (uint16x8_t __a, uint32x4_t __b)
{
  return __arm_vqmovnbq_u32 (__a, __b);
}

/* Polynomial long multiply of top p16 lane pairs.  */
__extension__ extern __inline uint32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmulltq_poly (uint16x8_t __a, uint16x8_t __b)
{
  return __arm_vmulltq_poly_p16 (__a, __b);
}

/* Polynomial long multiply of bottom p16 lane pairs.  */
__extension__ extern __inline uint32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmullbq_poly (uint16x8_t __a, uint16x8_t __b)
{
  return __arm_vmullbq_poly_p16 (__a, __b);
}

/* Truncating narrow of u32 lanes into the top halves of a u16 vector.  */
__extension__ extern __inline uint16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmovntq (uint16x8_t __a, uint32x4_t __b)
{
  return __arm_vmovntq_u32 (__a, __b);
}

/* Truncating narrow of u32 lanes into the bottom halves of a u16 vector.  */
__extension__ extern __inline uint16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmovnbq (uint16x8_t __a, uint32x4_t __b)
{
  return __arm_vmovnbq_u32 (__a, __b);
}

/* Long multiply-add across u32 lanes, 64-bit result.  */
__extension__ extern __inline uint64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmlaldavq (uint32x4_t __a, uint32x4_t __b)
{
  return __arm_vmlaldavq_u32 (__a, __b);
}

/* Saturating narrow of s32 lanes to unsigned, into top u16 halves.  */
__extension__ extern __inline uint16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqmovuntq (uint16x8_t __a, int32x4_t __b)
{
  return __arm_vqmovuntq_s32 (__a, __b);
}

/* Saturating narrow of s32 lanes to unsigned, into bottom u16 halves.  */
__extension__ extern __inline uint16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqmovunbq (uint16x8_t __a, int32x4_t __b)
{
  return __arm_vqmovunbq_s32 (__a, __b);
}

/* Shift-left long of the top u16 lanes by an immediate, widening to u32.  */
__extension__ extern __inline uint32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshlltq (uint16x8_t __a, const int __imm)
{
  return __arm_vshlltq_n_u16 (__a, __imm);
}

/* Shift-left long of the bottom u16 lanes by an immediate, widening to u32.  */
__extension__ extern __inline uint32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshllbq (uint16x8_t __a, const int __imm)
{
  return __arm_vshllbq_n_u16 (__a, __imm);
}

/* Bitwise OR of each u32 lane with an immediate.  */
__extension__ extern __inline uint32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vorrq (uint32x4_t __a, const int __imm)
{
  return __arm_vorrq_n_u32 (__a, __imm);
}

/* Bitwise clear (AND NOT) of each u32 lane with an immediate.  */
__extension__ extern __inline uint32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vbicq (uint32x4_t __a, const int __imm)
{
  return __arm_vbicq_n_u32 (__a, __imm);
}
/* Signed 32-bit narrowing, widening-multiply, long-MAC and shift-long
   wrappers, plus immediate orr/bic on s32 vectors.  */

/* Saturating narrow of s32 lanes into the top halves of an s16 vector.  */
__extension__ extern __inline int16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqmovntq (int16x8_t __a, int32x4_t __b)
{
  return __arm_vqmovntq_s32 (__a, __b);
}

/* Saturating narrow of s32 lanes into the bottom halves of an s16 vector.  */
__extension__ extern __inline int16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqmovnbq (int16x8_t __a, int32x4_t __b)
{
  return __arm_vqmovnbq_s32 (__a, __b);
}

/* Saturating doubling long multiply of top s32 lane pairs (vector form).  */
__extension__ extern __inline int64x2_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqdmulltq (int32x4_t __a, int32x4_t __b)
{
  return __arm_vqdmulltq_s32 (__a, __b);
}

/* Saturating doubling long multiply of top s32 lanes by a scalar.  */
__extension__ extern __inline int64x2_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqdmulltq (int32x4_t __a, int32_t __b)
{
  return __arm_vqdmulltq_n_s32 (__a, __b);
}

/* Saturating doubling long multiply of bottom s32 lane pairs (vector form).  */
__extension__ extern __inline int64x2_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqdmullbq (int32x4_t __a, int32x4_t __b)
{
  return __arm_vqdmullbq_s32 (__a, __b);
}

/* Saturating doubling long multiply of bottom s32 lanes by a scalar.  */
__extension__ extern __inline int64x2_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqdmullbq (int32x4_t __a, int32_t __b)
{
  return __arm_vqdmullbq_n_s32 (__a, __b);
}

/* Truncating narrow of s32 lanes into the top halves of an s16 vector.  */
__extension__ extern __inline int16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmovntq (int16x8_t __a, int32x4_t __b)
{
  return __arm_vmovntq_s32 (__a, __b);
}

/* Truncating narrow of s32 lanes into the bottom halves of an s16 vector.  */
__extension__ extern __inline int16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmovnbq (int16x8_t __a, int32x4_t __b)
{
  return __arm_vmovnbq_s32 (__a, __b);
}

/* Long multiply-subtract (exchanged) across s32 lanes, 64-bit result.  */
__extension__ extern __inline int64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmlsldavxq (int32x4_t __a, int32x4_t __b)
{
  return __arm_vmlsldavxq_s32 (__a, __b);
}

/* Long multiply-subtract across s32 lanes, 64-bit result.  */
__extension__ extern __inline int64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmlsldavq (int32x4_t __a, int32x4_t __b)
{
  return __arm_vmlsldavq_s32 (__a, __b);
}

/* Long multiply-add (exchanged) across s32 lanes, 64-bit result.  */
__extension__ extern __inline int64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmlaldavxq (int32x4_t __a, int32x4_t __b)
{
  return __arm_vmlaldavxq_s32 (__a, __b);
}

/* Long multiply-add across s32 lanes, 64-bit result.  */
__extension__ extern __inline int64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmlaldavq (int32x4_t __a, int32x4_t __b)
{
  return __arm_vmlaldavq_s32 (__a, __b);
}

/* Shift-left long of the top s16 lanes by an immediate, widening to s32.  */
__extension__ extern __inline int32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshlltq (int16x8_t __a, const int __imm)
{
  return __arm_vshlltq_n_s16 (__a, __imm);
}

/* Shift-left long of the bottom s16 lanes by an immediate, widening to s32.  */
__extension__ extern __inline int32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshllbq (int16x8_t __a, const int __imm)
{
  return __arm_vshllbq_n_s16 (__a, __imm);
}

/* Bitwise OR of each s32 lane with an immediate.  */
__extension__ extern __inline int32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vorrq (int32x4_t __a, const int __imm)
{
  return __arm_vorrq_n_s32 (__a, __imm);
}

/* Bitwise clear (AND NOT) of each s32 lane with an immediate.  */
__extension__ extern __inline int32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vbicq (int32x4_t __a, const int __imm)
{
  return __arm_vbicq_n_s32 (__a, __imm);
}
/* Rounding high-half long multiply-accumulate reductions and long add-across
   with accumulator wrappers.  */

/* Rounding long multiply-add across u32 lanes (high half), 64-bit result.  */
__extension__ extern __inline uint64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vrmlaldavhq (uint32x4_t __a, uint32x4_t __b)
{
  return __arm_vrmlaldavhq_u32 (__a, __b);
}

/* Add across u32 lanes into a 64-bit accumulator.  */
__extension__ extern __inline uint64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vaddlvaq (uint64_t __a, uint32x4_t __b)
{
  return __arm_vaddlvaq_u32 (__a, __b);
}

/* Rounding long multiply-subtract (exchanged) across s32 lanes, high half.  */
__extension__ extern __inline int64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vrmlsldavhxq (int32x4_t __a, int32x4_t __b)
{
  return __arm_vrmlsldavhxq_s32 (__a, __b);
}

/* Rounding long multiply-subtract across s32 lanes, high half.  */
__extension__ extern __inline int64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vrmlsldavhq (int32x4_t __a, int32x4_t __b)
{
  return __arm_vrmlsldavhq_s32 (__a, __b);
}

/* Rounding long multiply-add (exchanged) across s32 lanes, high half.  */
__extension__ extern __inline int64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vrmlaldavhxq (int32x4_t __a, int32x4_t __b)
{
  return __arm_vrmlaldavhxq_s32 (__a, __b);
}

/* Rounding long multiply-add across s32 lanes, high half.  */
__extension__ extern __inline int64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vrmlaldavhq (int32x4_t __a, int32x4_t __b)
{
  return __arm_vrmlaldavhq_s32 (__a, __b);
}

/* Add across s32 lanes into a 64-bit accumulator.  */
__extension__ extern __inline int64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vaddlvaq (int64_t __a, int32x4_t __b)
{
  return __arm_vaddlvaq_s32 (__a, __b);
}
/* vabavq: accumulate absolute differences of lane pairs into a 32-bit
   scalar accumulator — one overload per element type.  */

/* s8 element variant.  */
__extension__ extern __inline uint32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vabavq (uint32_t __a, int8x16_t __b, int8x16_t __c)
{
  return __arm_vabavq_s8 (__a, __b, __c);
}

/* s16 element variant.  */
__extension__ extern __inline uint32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vabavq (uint32_t __a, int16x8_t __b, int16x8_t __c)
{
  return __arm_vabavq_s16 (__a, __b, __c);
}

/* s32 element variant.  */
__extension__ extern __inline uint32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vabavq (uint32_t __a, int32x4_t __b, int32x4_t __c)
{
  return __arm_vabavq_s32 (__a, __b, __c);
}

/* u8 element variant.  */
__extension__ extern __inline uint32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vabavq (uint32_t __a, uint8x16_t __b, uint8x16_t __c)
{
  return __arm_vabavq_u8 (__a, __b, __c);
}

/* u16 element variant.  */
__extension__ extern __inline uint32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vabavq (uint32_t __a, uint16x8_t __b, uint16x8_t __c)
{
  return __arm_vabavq_u16 (__a, __b, __c);
}

/* u32 element variant.  */
__extension__ extern __inline uint32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vabavq (uint32_t __a, uint32x4_t __b, uint32x4_t __c)
{
  return __arm_vabavq_u32 (__a, __b, __c);
}
/* Predicated immediate bit-clear and saturating rounding narrowing shifts.  */

/* Predicated BIC of each s16 lane with an immediate; inactive lanes keep __a.  */
__extension__ extern __inline int16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vbicq_m_n (int16x8_t __a, const int __imm, mve_pred16_t __p)
{
  return __arm_vbicq_m_n_s16 (__a, __imm, __p);
}

/* Predicated BIC of each s32 lane with an immediate.  */
__extension__ extern __inline int32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vbicq_m_n (int32x4_t __a, const int __imm, mve_pred16_t __p)
{
  return __arm_vbicq_m_n_s32 (__a, __imm, __p);
}

/* Predicated BIC of each u16 lane with an immediate.  */
__extension__ extern __inline uint16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vbicq_m_n (uint16x8_t __a, const int __imm, mve_pred16_t __p)
{
  return __arm_vbicq_m_n_u16 (__a, __imm, __p);
}

/* Predicated BIC of each u32 lane with an immediate.  */
__extension__ extern __inline uint32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vbicq_m_n (uint32x4_t __a, const int __imm, mve_pred16_t __p)
{
  return __arm_vbicq_m_n_u32 (__a, __imm, __p);
}

/* Saturating rounding shift-right narrow of s16 lanes into bottom s8 halves.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrshrnbq (int8x16_t __a, int16x8_t __b, const int __imm)
{
  return __arm_vqrshrnbq_n_s16 (__a, __b, __imm);
}

/* Saturating rounding shift-right narrow of u16 lanes into bottom u8 halves.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrshrnbq (uint8x16_t __a, uint16x8_t __b, const int __imm)
{
  return __arm_vqrshrnbq_n_u16 (__a, __b, __imm);
}

/* Saturating rounding shift-right narrow of s32 lanes into bottom s16 halves.  */
__extension__ extern __inline int16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrshrnbq (int16x8_t __a, int32x4_t __b, const int __imm)
{
  return __arm_vqrshrnbq_n_s32 (__a, __b, __imm);
}

/* Saturating rounding shift-right narrow of u32 lanes into bottom u16 halves.  */
__extension__ extern __inline uint16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrshrnbq (uint16x8_t __a, uint32x4_t __b, const int __imm)
{
  return __arm_vqrshrnbq_n_u32 (__a, __b, __imm);
}

/* Saturating rounding shift-right narrow of s16 lanes to unsigned bottom halves.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrshrunbq (uint8x16_t __a, int16x8_t __b, const int __imm)
{
  return __arm_vqrshrunbq_n_s16 (__a, __b, __imm);
}

/* Saturating rounding shift-right narrow of s32 lanes to unsigned bottom halves.  */
__extension__ extern __inline uint16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrshrunbq (uint16x8_t __a, int32x4_t __b, const int __imm)
{
  return __arm_vqrshrunbq_n_s32 (__a, __b, __imm);
}
/* Rounding high-half long multiply-accumulate with a 64-bit accumulator.  */

/* Signed 32-bit element variant.  */
__extension__ extern __inline int64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vrmlaldavhaq (int64_t __a, int32x4_t __b, int32x4_t __c)
{
  return __arm_vrmlaldavhaq_s32 (__a, __b, __c);
}

/* Unsigned 32-bit element variant.  */
__extension__ extern __inline uint64_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vrmlaldavhaq (uint64_t __a, uint32x4_t __b, uint32x4_t __c)
{
  return __arm_vrmlaldavhaq_u32 (__a, __b, __c);
}
/* vshlcq: whole-vector shift left with carry through *__b — one overload per
   element type.  __b is read and updated with the bits shifted out.  */

/* s8 element variant.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshlcq (int8x16_t __a, uint32_t * __b, const int __imm)
{
  return __arm_vshlcq_s8 (__a, __b, __imm);
}

/* u8 element variant.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshlcq (uint8x16_t __a, uint32_t * __b, const int __imm)
{
  return __arm_vshlcq_u8 (__a, __b, __imm);
}

/* s16 element variant.  */
__extension__ extern __inline int16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshlcq (int16x8_t __a, uint32_t * __b, const int __imm)
{
  return __arm_vshlcq_s16 (__a, __b, __imm);
}

/* u16 element variant.  */
__extension__ extern __inline uint16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshlcq (uint16x8_t __a, uint32_t * __b, const int __imm)
{
  return __arm_vshlcq_u16 (__a, __b, __imm);
}

/* s32 element variant.  */
__extension__ extern __inline int32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshlcq (int32x4_t __a, uint32_t * __b, const int __imm)
{
  return __arm_vshlcq_s32 (__a, __b, __imm);
}

/* u32 element variant.  */
__extension__ extern __inline uint32x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshlcq (uint32x4_t __a, uint32_t * __b, const int __imm)
{
  return __arm_vshlcq_u32 (__a, __b, __imm);
}
/* vpselq: per-lane select between __a and __b under predicate __p.  */

/* u8 element variant.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vpselq (uint8x16_t __a, uint8x16_t __b, mve_pred16_t __p)
{
  return __arm_vpselq_u8 (__a, __b, __p);
}

/* s8 element variant.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vpselq (int8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vpselq_s8 (__a, __b, __p);
}
/* Unsigned 8-bit predicated and ternary operation wrappers.  _m forms take
   an __inactive vector (or keep __a) for lanes where __p is false.  */

/* Predicated 64-bit-group byte reversal of u8 lanes.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vrev64q_m (uint8x16_t __inactive, uint8x16_t __a, mve_pred16_t __p)
{
  return __arm_vrev64q_m_u8 (__inactive, __a, __p);
}

/* Predicated bitwise NOT of u8 lanes.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmvnq_m (uint8x16_t __inactive, uint8x16_t __a, mve_pred16_t __p)
{
  return __arm_vmvnq_m_u8 (__inactive, __a, __p);
}

/* Multiply __a by __b and add scalar __c, per u8 lane (vmlas).  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmlasq (uint8x16_t __a, uint8x16_t __b, uint8_t __c)
{
  return __arm_vmlasq_n_u8 (__a, __b, __c);
}

/* Multiply __b by scalar __c and add __a, per u8 lane (vmla).  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmlaq (uint8x16_t __a, uint8x16_t __b, uint8_t __c)
{
  return __arm_vmlaq_n_u8 (__a, __b, __c);
}

/* Predicated multiply-add across u8 lanes, 32-bit result.  */
__extension__ extern __inline uint32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmladavq_p (uint8x16_t __a, uint8x16_t __b, mve_pred16_t __p)
{
  return __arm_vmladavq_p_u8 (__a, __b, __p);
}

/* Multiply-add across u8 lanes into a 32-bit accumulator.  */
__extension__ extern __inline uint32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmladavaq (uint32_t __a, uint8x16_t __b, uint8x16_t __c)
{
  return __arm_vmladavaq_u8 (__a, __b, __c);
}

/* Predicated minimum across u8 lanes, seeded with scalar __a.  */
__extension__ extern __inline uint8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vminvq_p (uint8_t __a, uint8x16_t __b, mve_pred16_t __p)
{
  return __arm_vminvq_p_u8 (__a, __b, __p);
}

/* Predicated maximum across u8 lanes, seeded with scalar __a.  */
__extension__ extern __inline uint8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmaxvq_p (uint8_t __a, uint8x16_t __b, mve_pred16_t __p)
{
  return __arm_vmaxvq_p_u8 (__a, __b, __p);
}

/* Predicated broadcast of scalar __a into u8 lanes.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vdupq_m (uint8x16_t __inactive, uint8_t __a, mve_pred16_t __p)
{
  return __arm_vdupq_m_n_u8 (__inactive, __a, __p);
}

/* Predicated lane compares producing an mve_pred16_t mask.  */

/* __a != __b (vector).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpneq_m (uint8x16_t __a, uint8x16_t __b, mve_pred16_t __p)
{
  return __arm_vcmpneq_m_u8 (__a, __b, __p);
}

/* __a != __b (scalar).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpneq_m (uint8x16_t __a, uint8_t __b, mve_pred16_t __p)
{
  return __arm_vcmpneq_m_n_u8 (__a, __b, __p);
}

/* __a > __b, unsigned (vector).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmphiq_m (uint8x16_t __a, uint8x16_t __b, mve_pred16_t __p)
{
  return __arm_vcmphiq_m_u8 (__a, __b, __p);
}

/* __a > __b, unsigned (scalar).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmphiq_m (uint8x16_t __a, uint8_t __b, mve_pred16_t __p)
{
  return __arm_vcmphiq_m_n_u8 (__a, __b, __p);
}

/* __a == __b (vector).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpeqq_m (uint8x16_t __a, uint8x16_t __b, mve_pred16_t __p)
{
  return __arm_vcmpeqq_m_u8 (__a, __b, __p);
}

/* __a == __b (scalar).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpeqq_m (uint8x16_t __a, uint8_t __b, mve_pred16_t __p)
{
  return __arm_vcmpeqq_m_n_u8 (__a, __b, __p);
}

/* __a >= __b, unsigned (vector).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpcsq_m (uint8x16_t __a, uint8x16_t __b, mve_pred16_t __p)
{
  return __arm_vcmpcsq_m_u8 (__a, __b, __p);
}

/* __a >= __b, unsigned (scalar).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpcsq_m (uint8x16_t __a, uint8_t __b, mve_pred16_t __p)
{
  return __arm_vcmpcsq_m_n_u8 (__a, __b, __p);
}

/* Predicated count-leading-zeros of u8 lanes.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vclzq_m (uint8x16_t __inactive, uint8x16_t __a, mve_pred16_t __p)
{
  return __arm_vclzq_m_u8 (__inactive, __a, __p);
}

/* Predicated add across u8 lanes into a 32-bit accumulator.  */
__extension__ extern __inline uint32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vaddvaq_p (uint32_t __a, uint8x16_t __b, mve_pred16_t __p)
{
  return __arm_vaddvaq_p_u8 (__a, __b, __p);
}

/* Shift-right-insert of __b into __a by an immediate, per u8 lane.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vsriq (uint8x16_t __a, uint8x16_t __b, const int __imm)
{
  return __arm_vsriq_n_u8 (__a, __b, __imm);
}

/* Shift-left-insert of __b into __a by an immediate, per u8 lane.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vsliq (uint8x16_t __a, uint8x16_t __b, const int __imm)
{
  return __arm_vsliq_n_u8 (__a, __b, __imm);
}

/* Predicated register-count shift left of u8 lanes.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshlq_m_r (uint8x16_t __a, int32_t __b, mve_pred16_t __p)
{
  return __arm_vshlq_m_r_u8 (__a, __b, __p);
}

/* Predicated rounding register-count shift left of u8 lanes.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vrshlq_m_n (uint8x16_t __a, int32_t __b, mve_pred16_t __p)
{
  return __arm_vrshlq_m_n_u8 (__a, __b, __p);
}

/* Predicated saturating register-count shift left of u8 lanes.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqshlq_m_r (uint8x16_t __a, int32_t __b, mve_pred16_t __p)
{
  return __arm_vqshlq_m_r_u8 (__a, __b, __p);
}

/* Predicated saturating rounding register-count shift left of u8 lanes.  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrshlq_m_n (uint8x16_t __a, int32_t __b, mve_pred16_t __p)
{
  return __arm_vqrshlq_m_n_u8 (__a, __b, __p);
}
/* Signed 8-bit absolute min/max and predicated compare wrappers.  */

/* Predicated minimum of absolute values of s8 lanes, seeded with __a.  */
__extension__ extern __inline uint8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vminavq_p (uint8_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vminavq_p_s8 (__a, __b, __p);
}

/* Predicated per-lane minimum of __a and |__b| (s8 lanes).  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vminaq_m (uint8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vminaq_m_s8 (__a, __b, __p);
}

/* Predicated maximum of absolute values of s8 lanes, seeded with __a.  */
__extension__ extern __inline uint8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmaxavq_p (uint8_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vmaxavq_p_s8 (__a, __b, __p);
}

/* Predicated per-lane maximum of __a and |__b| (s8 lanes).  */
__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmaxaq_m (uint8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vmaxaq_m_s8 (__a, __b, __p);
}

/* Predicated s8 lane compares producing an mve_pred16_t mask.  */

/* __a != __b (vector).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpneq_m (int8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vcmpneq_m_s8 (__a, __b, __p);
}

/* __a != __b (scalar).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpneq_m (int8x16_t __a, int8_t __b, mve_pred16_t __p)
{
  return __arm_vcmpneq_m_n_s8 (__a, __b, __p);
}

/* __a < __b (vector).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpltq_m (int8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vcmpltq_m_s8 (__a, __b, __p);
}

/* __a < __b (scalar).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpltq_m (int8x16_t __a, int8_t __b, mve_pred16_t __p)
{
  return __arm_vcmpltq_m_n_s8 (__a, __b, __p);
}

/* __a <= __b (vector).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpleq_m (int8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vcmpleq_m_s8 (__a, __b, __p);
}

/* __a <= __b (scalar).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpleq_m (int8x16_t __a, int8_t __b, mve_pred16_t __p)
{
  return __arm_vcmpleq_m_n_s8 (__a, __b, __p);
}

/* __a > __b (vector).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpgtq_m (int8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vcmpgtq_m_s8 (__a, __b, __p);
}

/* __a > __b (scalar).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpgtq_m (int8x16_t __a, int8_t __b, mve_pred16_t __p)
{
  return __arm_vcmpgtq_m_n_s8 (__a, __b, __p);
}

/* __a >= __b (vector).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpgeq_m (int8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vcmpgeq_m_s8 (__a, __b, __p);
}

/* __a >= __b (scalar).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpgeq_m (int8x16_t __a, int8_t __b, mve_pred16_t __p)
{
  return __arm_vcmpgeq_m_n_s8 (__a, __b, __p);
}

/* __a == __b (vector).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpeqq_m (int8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vcmpeqq_m_s8 (__a, __b, __p);
}

/* __a == __b (scalar).  */
__extension__ extern __inline mve_pred16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vcmpeqq_m (int8x16_t __a, int8_t __b, mve_pred16_t __p)
{
  return __arm_vcmpeqq_m_n_s8 (__a, __b, __p);
}
/* Signed 8-bit predicated shifts, unary _m operations and predicated
   reductions.  _m forms write __inactive (or keep __a) where __p is false.  */

/* Predicated register-count shift left of s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vshlq_m_r (int8x16_t __a, int32_t __b, mve_pred16_t __p)
{
  return __arm_vshlq_m_r_s8 (__a, __b, __p);
}

/* Predicated rounding register-count shift left of s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vrshlq_m_n (int8x16_t __a, int32_t __b, mve_pred16_t __p)
{
  return __arm_vrshlq_m_n_s8 (__a, __b, __p);
}

/* Predicated 64-bit-group byte reversal of s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vrev64q_m (int8x16_t __inactive, int8x16_t __a, mve_pred16_t __p)
{
  return __arm_vrev64q_m_s8 (__inactive, __a, __p);
}

/* Predicated saturating register-count shift left of s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqshlq_m_r (int8x16_t __a, int32_t __b, mve_pred16_t __p)
{
  return __arm_vqshlq_m_r_s8 (__a, __b, __p);
}

/* Predicated saturating rounding register-count shift left of s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrshlq_m_n (int8x16_t __a, int32_t __b, mve_pred16_t __p)
{
  return __arm_vqrshlq_m_n_s8 (__a, __b, __p);
}

/* Predicated saturating negate of s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqnegq_m (int8x16_t __inactive, int8x16_t __a, mve_pred16_t __p)
{
  return __arm_vqnegq_m_s8 (__inactive, __a, __p);
}

/* Predicated saturating absolute value of s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqabsq_m (int8x16_t __inactive, int8x16_t __a, mve_pred16_t __p)
{
  return __arm_vqabsq_m_s8 (__inactive, __a, __p);
}

/* Predicated negate of s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vnegq_m (int8x16_t __inactive, int8x16_t __a, mve_pred16_t __p)
{
  return __arm_vnegq_m_s8 (__inactive, __a, __p);
}

/* Predicated bitwise NOT of s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmvnq_m (int8x16_t __inactive, int8x16_t __a, mve_pred16_t __p)
{
  return __arm_vmvnq_m_s8 (__inactive, __a, __p);
}

/* Predicated multiply-subtract (exchanged) across s8 lanes.  */
__extension__ extern __inline int32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmlsdavxq_p (int8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vmlsdavxq_p_s8 (__a, __b, __p);
}

/* Predicated multiply-subtract across s8 lanes.  */
__extension__ extern __inline int32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmlsdavq_p (int8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vmlsdavq_p_s8 (__a, __b, __p);
}

/* Predicated multiply-add (exchanged) across s8 lanes.  */
__extension__ extern __inline int32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmladavxq_p (int8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vmladavxq_p_s8 (__a, __b, __p);
}

/* Predicated multiply-add across s8 lanes.  */
__extension__ extern __inline int32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmladavq_p (int8x16_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vmladavq_p_s8 (__a, __b, __p);
}

/* Predicated minimum across s8 lanes, seeded with scalar __a.  */
__extension__ extern __inline int8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vminvq_p (int8_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vminvq_p_s8 (__a, __b, __p);
}

/* Predicated maximum across s8 lanes, seeded with scalar __a.  */
__extension__ extern __inline int8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vmaxvq_p (int8_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vmaxvq_p_s8 (__a, __b, __p);
}

/* Predicated broadcast of scalar __a into s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vdupq_m (int8x16_t __inactive, int8_t __a, mve_pred16_t __p)
{
  return __arm_vdupq_m_n_s8 (__inactive, __a, __p);
}

/* Predicated count-leading-zeros of s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vclzq_m (int8x16_t __inactive, int8x16_t __a, mve_pred16_t __p)
{
  return __arm_vclzq_m_s8 (__inactive, __a, __p);
}

/* Predicated count-leading-sign-bits of s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vclsq_m (int8x16_t __inactive, int8x16_t __a, mve_pred16_t __p)
{
  return __arm_vclsq_m_s8 (__inactive, __a, __p);
}

/* Predicated add across s8 lanes into a 32-bit accumulator.  */
__extension__ extern __inline int32_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vaddvaq_p (int32_t __a, int8x16_t __b, mve_pred16_t __p)
{
  return __arm_vaddvaq_p_s8 (__a, __b, __p);
}

/* Predicated absolute value of s8 lanes.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vabsq_m (int8x16_t __inactive, int8x16_t __a, mve_pred16_t __p)
{
  return __arm_vabsq_m_s8 (__inactive, __a, __p);
}
/* Signed 8-bit saturating (rounding) doubling multiply-accumulate family.  */

/* Saturating rounding doubling multiply-subtract dual-half, exchanged.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrdmlsdhxq (int8x16_t __inactive, int8x16_t __a, int8x16_t __b)
{
  return __arm_vqrdmlsdhxq_s8 (__inactive, __a, __b);
}

/* Saturating rounding doubling multiply-subtract dual-half.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrdmlsdhq (int8x16_t __inactive, int8x16_t __a, int8x16_t __b)
{
  return __arm_vqrdmlsdhq_s8 (__inactive, __a, __b);
}

/* Saturating rounding doubling multiply-add with scalar addend (vmlas form).  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrdmlashq (int8x16_t __a, int8x16_t __b, int8_t __c)
{
  return __arm_vqrdmlashq_n_s8 (__a, __b, __c);
}

/* Saturating doubling multiply-add with scalar addend (vmlas form).  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqdmlashq (int8x16_t __a, int8x16_t __b, int8_t __c)
{
  return __arm_vqdmlashq_n_s8 (__a, __b, __c);
}

/* Saturating rounding doubling multiply-add by scalar (vmla form).  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrdmlahq (int8x16_t __a, int8x16_t __b, int8_t __c)
{
  return __arm_vqrdmlahq_n_s8 (__a, __b, __c);
}

/* Saturating rounding doubling multiply-add dual-half, exchanged.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrdmladhxq (int8x16_t __inactive, int8x16_t __a, int8x16_t __b)
{
  return __arm_vqrdmladhxq_s8 (__inactive, __a, __b);
}

/* Saturating rounding doubling multiply-add dual-half.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqrdmladhq (int8x16_t __inactive, int8x16_t __a, int8x16_t __b)
{
  return __arm_vqrdmladhq_s8 (__inactive, __a, __b);
}

/* Saturating doubling multiply-subtract dual-half, exchanged.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqdmlsdhxq (int8x16_t __inactive, int8x16_t __a, int8x16_t __b)
{
  return __arm_vqdmlsdhxq_s8 (__inactive, __a, __b);
}

/* Saturating doubling multiply-subtract dual-half.  */
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
__arm_vqdmlsdhq (int8x16_t __inactive, int8x16_t __a, int8x16_t __b)
{
  return __arm_vqdmlsdhq_s8 (__inactive, __a, __b);
}
24192 __extension__
extern __inline int8x16_t
24193 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24194 __arm_vqdmlahq (int8x16_t __a
, int8x16_t __b
, int8_t __c
)
24196 return __arm_vqdmlahq_n_s8 (__a
, __b
, __c
);
24199 __extension__
extern __inline int8x16_t
24200 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24201 __arm_vqdmladhxq (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
)
24203 return __arm_vqdmladhxq_s8 (__inactive
, __a
, __b
);
24206 __extension__
extern __inline int8x16_t
24207 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24208 __arm_vqdmladhq (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
)
24210 return __arm_vqdmladhq_s8 (__inactive
, __a
, __b
);
24213 __extension__
extern __inline
int32_t
24214 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24215 __arm_vmlsdavaxq (int32_t __a
, int8x16_t __b
, int8x16_t __c
)
24217 return __arm_vmlsdavaxq_s8 (__a
, __b
, __c
);
24220 __extension__
extern __inline
int32_t
24221 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24222 __arm_vmlsdavaq (int32_t __a
, int8x16_t __b
, int8x16_t __c
)
24224 return __arm_vmlsdavaq_s8 (__a
, __b
, __c
);
24227 __extension__
extern __inline int8x16_t
24228 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24229 __arm_vmlasq (int8x16_t __a
, int8x16_t __b
, int8_t __c
)
24231 return __arm_vmlasq_n_s8 (__a
, __b
, __c
);
24234 __extension__
extern __inline int8x16_t
24235 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24236 __arm_vmlaq (int8x16_t __a
, int8x16_t __b
, int8_t __c
)
24238 return __arm_vmlaq_n_s8 (__a
, __b
, __c
);
24241 __extension__
extern __inline
int32_t
24242 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24243 __arm_vmladavaxq (int32_t __a
, int8x16_t __b
, int8x16_t __c
)
24245 return __arm_vmladavaxq_s8 (__a
, __b
, __c
);
24248 __extension__
extern __inline
int32_t
24249 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24250 __arm_vmladavaq (int32_t __a
, int8x16_t __b
, int8x16_t __c
)
24252 return __arm_vmladavaq_s8 (__a
, __b
, __c
);
24255 __extension__
extern __inline int8x16_t
24256 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24257 __arm_vsriq (int8x16_t __a
, int8x16_t __b
, const int __imm
)
24259 return __arm_vsriq_n_s8 (__a
, __b
, __imm
);
24262 __extension__
extern __inline int8x16_t
24263 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24264 __arm_vsliq (int8x16_t __a
, int8x16_t __b
, const int __imm
)
24266 return __arm_vsliq_n_s8 (__a
, __b
, __imm
);
24269 __extension__
extern __inline uint16x8_t
24270 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24271 __arm_vpselq (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
24273 return __arm_vpselq_u16 (__a
, __b
, __p
);
24276 __extension__
extern __inline int16x8_t
24277 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24278 __arm_vpselq (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24280 return __arm_vpselq_s16 (__a
, __b
, __p
);
24283 __extension__
extern __inline uint16x8_t
24284 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24285 __arm_vrev64q_m (uint16x8_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
24287 return __arm_vrev64q_m_u16 (__inactive
, __a
, __p
);
24290 __extension__
extern __inline uint16x8_t
24291 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24292 __arm_vmvnq_m (uint16x8_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
24294 return __arm_vmvnq_m_u16 (__inactive
, __a
, __p
);
24297 __extension__
extern __inline uint16x8_t
24298 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24299 __arm_vmlasq (uint16x8_t __a
, uint16x8_t __b
, uint16_t __c
)
24301 return __arm_vmlasq_n_u16 (__a
, __b
, __c
);
24304 __extension__
extern __inline uint16x8_t
24305 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24306 __arm_vmlaq (uint16x8_t __a
, uint16x8_t __b
, uint16_t __c
)
24308 return __arm_vmlaq_n_u16 (__a
, __b
, __c
);
24311 __extension__
extern __inline
uint32_t
24312 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24313 __arm_vmladavq_p (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
24315 return __arm_vmladavq_p_u16 (__a
, __b
, __p
);
24318 __extension__
extern __inline
uint32_t
24319 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24320 __arm_vmladavaq (uint32_t __a
, uint16x8_t __b
, uint16x8_t __c
)
24322 return __arm_vmladavaq_u16 (__a
, __b
, __c
);
24325 __extension__
extern __inline
uint16_t
24326 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24327 __arm_vminvq_p (uint16_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
24329 return __arm_vminvq_p_u16 (__a
, __b
, __p
);
24332 __extension__
extern __inline
uint16_t
24333 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24334 __arm_vmaxvq_p (uint16_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
24336 return __arm_vmaxvq_p_u16 (__a
, __b
, __p
);
24339 __extension__
extern __inline uint16x8_t
24340 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24341 __arm_vdupq_m (uint16x8_t __inactive
, uint16_t __a
, mve_pred16_t __p
)
24343 return __arm_vdupq_m_n_u16 (__inactive
, __a
, __p
);
24346 __extension__
extern __inline mve_pred16_t
24347 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24348 __arm_vcmpneq_m (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
24350 return __arm_vcmpneq_m_u16 (__a
, __b
, __p
);
24353 __extension__
extern __inline mve_pred16_t
24354 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24355 __arm_vcmpneq_m (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
24357 return __arm_vcmpneq_m_n_u16 (__a
, __b
, __p
);
24360 __extension__
extern __inline mve_pred16_t
24361 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24362 __arm_vcmphiq_m (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
24364 return __arm_vcmphiq_m_u16 (__a
, __b
, __p
);
24367 __extension__
extern __inline mve_pred16_t
24368 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24369 __arm_vcmphiq_m (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
24371 return __arm_vcmphiq_m_n_u16 (__a
, __b
, __p
);
24374 __extension__
extern __inline mve_pred16_t
24375 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24376 __arm_vcmpeqq_m (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
24378 return __arm_vcmpeqq_m_u16 (__a
, __b
, __p
);
24381 __extension__
extern __inline mve_pred16_t
24382 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24383 __arm_vcmpeqq_m (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
24385 return __arm_vcmpeqq_m_n_u16 (__a
, __b
, __p
);
24388 __extension__
extern __inline mve_pred16_t
24389 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24390 __arm_vcmpcsq_m (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
24392 return __arm_vcmpcsq_m_u16 (__a
, __b
, __p
);
24395 __extension__
extern __inline mve_pred16_t
24396 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24397 __arm_vcmpcsq_m (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
24399 return __arm_vcmpcsq_m_n_u16 (__a
, __b
, __p
);
24402 __extension__
extern __inline uint16x8_t
24403 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24404 __arm_vclzq_m (uint16x8_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
24406 return __arm_vclzq_m_u16 (__inactive
, __a
, __p
);
24409 __extension__
extern __inline
uint32_t
24410 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24411 __arm_vaddvaq_p (uint32_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
24413 return __arm_vaddvaq_p_u16 (__a
, __b
, __p
);
24416 __extension__
extern __inline uint16x8_t
24417 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24418 __arm_vsriq (uint16x8_t __a
, uint16x8_t __b
, const int __imm
)
24420 return __arm_vsriq_n_u16 (__a
, __b
, __imm
);
24423 __extension__
extern __inline uint16x8_t
24424 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24425 __arm_vsliq (uint16x8_t __a
, uint16x8_t __b
, const int __imm
)
24427 return __arm_vsliq_n_u16 (__a
, __b
, __imm
);
24430 __extension__
extern __inline uint16x8_t
24431 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24432 __arm_vshlq_m_r (uint16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
24434 return __arm_vshlq_m_r_u16 (__a
, __b
, __p
);
24437 __extension__
extern __inline uint16x8_t
24438 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24439 __arm_vrshlq_m_n (uint16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
24441 return __arm_vrshlq_m_n_u16 (__a
, __b
, __p
);
24444 __extension__
extern __inline uint16x8_t
24445 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24446 __arm_vqshlq_m_r (uint16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
24448 return __arm_vqshlq_m_r_u16 (__a
, __b
, __p
);
24451 __extension__
extern __inline uint16x8_t
24452 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24453 __arm_vqrshlq_m_n (uint16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
24455 return __arm_vqrshlq_m_n_u16 (__a
, __b
, __p
);
24458 __extension__
extern __inline
uint16_t
24459 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24460 __arm_vminavq_p (uint16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24462 return __arm_vminavq_p_s16 (__a
, __b
, __p
);
24465 __extension__
extern __inline uint16x8_t
24466 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24467 __arm_vminaq_m (uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24469 return __arm_vminaq_m_s16 (__a
, __b
, __p
);
24472 __extension__
extern __inline
uint16_t
24473 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24474 __arm_vmaxavq_p (uint16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24476 return __arm_vmaxavq_p_s16 (__a
, __b
, __p
);
24479 __extension__
extern __inline uint16x8_t
24480 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24481 __arm_vmaxaq_m (uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24483 return __arm_vmaxaq_m_s16 (__a
, __b
, __p
);
24486 __extension__
extern __inline mve_pred16_t
24487 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24488 __arm_vcmpneq_m (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24490 return __arm_vcmpneq_m_s16 (__a
, __b
, __p
);
24493 __extension__
extern __inline mve_pred16_t
24494 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24495 __arm_vcmpneq_m (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
24497 return __arm_vcmpneq_m_n_s16 (__a
, __b
, __p
);
24500 __extension__
extern __inline mve_pred16_t
24501 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24502 __arm_vcmpltq_m (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24504 return __arm_vcmpltq_m_s16 (__a
, __b
, __p
);
24507 __extension__
extern __inline mve_pred16_t
24508 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24509 __arm_vcmpltq_m (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
24511 return __arm_vcmpltq_m_n_s16 (__a
, __b
, __p
);
24514 __extension__
extern __inline mve_pred16_t
24515 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24516 __arm_vcmpleq_m (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24518 return __arm_vcmpleq_m_s16 (__a
, __b
, __p
);
24521 __extension__
extern __inline mve_pred16_t
24522 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24523 __arm_vcmpleq_m (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
24525 return __arm_vcmpleq_m_n_s16 (__a
, __b
, __p
);
24528 __extension__
extern __inline mve_pred16_t
24529 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24530 __arm_vcmpgtq_m (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24532 return __arm_vcmpgtq_m_s16 (__a
, __b
, __p
);
24535 __extension__
extern __inline mve_pred16_t
24536 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24537 __arm_vcmpgtq_m (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
24539 return __arm_vcmpgtq_m_n_s16 (__a
, __b
, __p
);
24542 __extension__
extern __inline mve_pred16_t
24543 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24544 __arm_vcmpgeq_m (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24546 return __arm_vcmpgeq_m_s16 (__a
, __b
, __p
);
24549 __extension__
extern __inline mve_pred16_t
24550 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24551 __arm_vcmpgeq_m (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
24553 return __arm_vcmpgeq_m_n_s16 (__a
, __b
, __p
);
24556 __extension__
extern __inline mve_pred16_t
24557 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24558 __arm_vcmpeqq_m (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24560 return __arm_vcmpeqq_m_s16 (__a
, __b
, __p
);
24563 __extension__
extern __inline mve_pred16_t
24564 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24565 __arm_vcmpeqq_m (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
24567 return __arm_vcmpeqq_m_n_s16 (__a
, __b
, __p
);
24570 __extension__
extern __inline int16x8_t
24571 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24572 __arm_vshlq_m_r (int16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
24574 return __arm_vshlq_m_r_s16 (__a
, __b
, __p
);
24577 __extension__
extern __inline int16x8_t
24578 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24579 __arm_vrshlq_m_n (int16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
24581 return __arm_vrshlq_m_n_s16 (__a
, __b
, __p
);
24584 __extension__
extern __inline int16x8_t
24585 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24586 __arm_vrev64q_m (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
24588 return __arm_vrev64q_m_s16 (__inactive
, __a
, __p
);
24591 __extension__
extern __inline int16x8_t
24592 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24593 __arm_vqshlq_m_r (int16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
24595 return __arm_vqshlq_m_r_s16 (__a
, __b
, __p
);
24598 __extension__
extern __inline int16x8_t
24599 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24600 __arm_vqrshlq_m_n (int16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
24602 return __arm_vqrshlq_m_n_s16 (__a
, __b
, __p
);
24605 __extension__
extern __inline int16x8_t
24606 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24607 __arm_vqnegq_m (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
24609 return __arm_vqnegq_m_s16 (__inactive
, __a
, __p
);
24612 __extension__
extern __inline int16x8_t
24613 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24614 __arm_vqabsq_m (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
24616 return __arm_vqabsq_m_s16 (__inactive
, __a
, __p
);
24619 __extension__
extern __inline int16x8_t
24620 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24621 __arm_vnegq_m (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
24623 return __arm_vnegq_m_s16 (__inactive
, __a
, __p
);
24626 __extension__
extern __inline int16x8_t
24627 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24628 __arm_vmvnq_m (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
24630 return __arm_vmvnq_m_s16 (__inactive
, __a
, __p
);
24633 __extension__
extern __inline
int32_t
24634 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24635 __arm_vmlsdavxq_p (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24637 return __arm_vmlsdavxq_p_s16 (__a
, __b
, __p
);
24640 __extension__
extern __inline
int32_t
24641 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24642 __arm_vmlsdavq_p (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24644 return __arm_vmlsdavq_p_s16 (__a
, __b
, __p
);
24647 __extension__
extern __inline
int32_t
24648 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24649 __arm_vmladavxq_p (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24651 return __arm_vmladavxq_p_s16 (__a
, __b
, __p
);
24654 __extension__
extern __inline
int32_t
24655 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24656 __arm_vmladavq_p (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24658 return __arm_vmladavq_p_s16 (__a
, __b
, __p
);
24661 __extension__
extern __inline
int16_t
24662 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24663 __arm_vminvq_p (int16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24665 return __arm_vminvq_p_s16 (__a
, __b
, __p
);
24668 __extension__
extern __inline
int16_t
24669 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24670 __arm_vmaxvq_p (int16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24672 return __arm_vmaxvq_p_s16 (__a
, __b
, __p
);
24675 __extension__
extern __inline int16x8_t
24676 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24677 __arm_vdupq_m (int16x8_t __inactive
, int16_t __a
, mve_pred16_t __p
)
24679 return __arm_vdupq_m_n_s16 (__inactive
, __a
, __p
);
24682 __extension__
extern __inline int16x8_t
24683 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24684 __arm_vclzq_m (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
24686 return __arm_vclzq_m_s16 (__inactive
, __a
, __p
);
24689 __extension__
extern __inline int16x8_t
24690 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24691 __arm_vclsq_m (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
24693 return __arm_vclsq_m_s16 (__inactive
, __a
, __p
);
24696 __extension__
extern __inline
int32_t
24697 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24698 __arm_vaddvaq_p (int32_t __a
, int16x8_t __b
, mve_pred16_t __p
)
24700 return __arm_vaddvaq_p_s16 (__a
, __b
, __p
);
24703 __extension__
extern __inline int16x8_t
24704 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24705 __arm_vabsq_m (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
24707 return __arm_vabsq_m_s16 (__inactive
, __a
, __p
);
24710 __extension__
extern __inline int16x8_t
24711 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24712 __arm_vqrdmlsdhxq (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
24714 return __arm_vqrdmlsdhxq_s16 (__inactive
, __a
, __b
);
24717 __extension__
extern __inline int16x8_t
24718 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24719 __arm_vqrdmlsdhq (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
24721 return __arm_vqrdmlsdhq_s16 (__inactive
, __a
, __b
);
24724 __extension__
extern __inline int16x8_t
24725 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24726 __arm_vqrdmlashq (int16x8_t __a
, int16x8_t __b
, int16_t __c
)
24728 return __arm_vqrdmlashq_n_s16 (__a
, __b
, __c
);
24731 __extension__
extern __inline int16x8_t
24732 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24733 __arm_vqdmlashq (int16x8_t __a
, int16x8_t __b
, int16_t __c
)
24735 return __arm_vqdmlashq_n_s16 (__a
, __b
, __c
);
24738 __extension__
extern __inline int16x8_t
24739 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24740 __arm_vqrdmlahq (int16x8_t __a
, int16x8_t __b
, int16_t __c
)
24742 return __arm_vqrdmlahq_n_s16 (__a
, __b
, __c
);
24745 __extension__
extern __inline int16x8_t
24746 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24747 __arm_vqrdmladhxq (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
24749 return __arm_vqrdmladhxq_s16 (__inactive
, __a
, __b
);
24752 __extension__
extern __inline int16x8_t
24753 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24754 __arm_vqrdmladhq (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
24756 return __arm_vqrdmladhq_s16 (__inactive
, __a
, __b
);
24759 __extension__
extern __inline int16x8_t
24760 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24761 __arm_vqdmlsdhxq (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
24763 return __arm_vqdmlsdhxq_s16 (__inactive
, __a
, __b
);
24766 __extension__
extern __inline int16x8_t
24767 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24768 __arm_vqdmlsdhq (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
24770 return __arm_vqdmlsdhq_s16 (__inactive
, __a
, __b
);
24773 __extension__
extern __inline int16x8_t
24774 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24775 __arm_vqdmlahq (int16x8_t __a
, int16x8_t __b
, int16_t __c
)
24777 return __arm_vqdmlahq_n_s16 (__a
, __b
, __c
);
24780 __extension__
extern __inline int16x8_t
24781 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24782 __arm_vqdmladhxq (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
24784 return __arm_vqdmladhxq_s16 (__inactive
, __a
, __b
);
24787 __extension__
extern __inline int16x8_t
24788 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24789 __arm_vqdmladhq (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
)
24791 return __arm_vqdmladhq_s16 (__inactive
, __a
, __b
);
24794 __extension__
extern __inline
int32_t
24795 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24796 __arm_vmlsdavaxq (int32_t __a
, int16x8_t __b
, int16x8_t __c
)
24798 return __arm_vmlsdavaxq_s16 (__a
, __b
, __c
);
24801 __extension__
extern __inline
int32_t
24802 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24803 __arm_vmlsdavaq (int32_t __a
, int16x8_t __b
, int16x8_t __c
)
24805 return __arm_vmlsdavaq_s16 (__a
, __b
, __c
);
24808 __extension__
extern __inline int16x8_t
24809 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24810 __arm_vmlasq (int16x8_t __a
, int16x8_t __b
, int16_t __c
)
24812 return __arm_vmlasq_n_s16 (__a
, __b
, __c
);
24815 __extension__
extern __inline int16x8_t
24816 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24817 __arm_vmlaq (int16x8_t __a
, int16x8_t __b
, int16_t __c
)
24819 return __arm_vmlaq_n_s16 (__a
, __b
, __c
);
24822 __extension__
extern __inline
int32_t
24823 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24824 __arm_vmladavaxq (int32_t __a
, int16x8_t __b
, int16x8_t __c
)
24826 return __arm_vmladavaxq_s16 (__a
, __b
, __c
);
24829 __extension__
extern __inline
int32_t
24830 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24831 __arm_vmladavaq (int32_t __a
, int16x8_t __b
, int16x8_t __c
)
24833 return __arm_vmladavaq_s16 (__a
, __b
, __c
);
24836 __extension__
extern __inline int16x8_t
24837 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24838 __arm_vsriq (int16x8_t __a
, int16x8_t __b
, const int __imm
)
24840 return __arm_vsriq_n_s16 (__a
, __b
, __imm
);
24843 __extension__
extern __inline int16x8_t
24844 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24845 __arm_vsliq (int16x8_t __a
, int16x8_t __b
, const int __imm
)
24847 return __arm_vsliq_n_s16 (__a
, __b
, __imm
);
24850 __extension__
extern __inline uint32x4_t
24851 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24852 __arm_vpselq (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
24854 return __arm_vpselq_u32 (__a
, __b
, __p
);
24857 __extension__
extern __inline int32x4_t
24858 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24859 __arm_vpselq (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
24861 return __arm_vpselq_s32 (__a
, __b
, __p
);
24864 __extension__
extern __inline uint32x4_t
24865 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24866 __arm_vrev64q_m (uint32x4_t __inactive
, uint32x4_t __a
, mve_pred16_t __p
)
24868 return __arm_vrev64q_m_u32 (__inactive
, __a
, __p
);
24871 __extension__
extern __inline uint32x4_t
24872 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24873 __arm_vmvnq_m (uint32x4_t __inactive
, uint32x4_t __a
, mve_pred16_t __p
)
24875 return __arm_vmvnq_m_u32 (__inactive
, __a
, __p
);
24878 __extension__
extern __inline uint32x4_t
24879 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24880 __arm_vmlasq (uint32x4_t __a
, uint32x4_t __b
, uint32_t __c
)
24882 return __arm_vmlasq_n_u32 (__a
, __b
, __c
);
24885 __extension__
extern __inline uint32x4_t
24886 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24887 __arm_vmlaq (uint32x4_t __a
, uint32x4_t __b
, uint32_t __c
)
24889 return __arm_vmlaq_n_u32 (__a
, __b
, __c
);
24892 __extension__
extern __inline
uint32_t
24893 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24894 __arm_vmladavq_p (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
24896 return __arm_vmladavq_p_u32 (__a
, __b
, __p
);
24899 __extension__
extern __inline
uint32_t
24900 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24901 __arm_vmladavaq (uint32_t __a
, uint32x4_t __b
, uint32x4_t __c
)
24903 return __arm_vmladavaq_u32 (__a
, __b
, __c
);
24906 __extension__
extern __inline
uint32_t
24907 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24908 __arm_vminvq_p (uint32_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
24910 return __arm_vminvq_p_u32 (__a
, __b
, __p
);
24913 __extension__
extern __inline
uint32_t
24914 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24915 __arm_vmaxvq_p (uint32_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
24917 return __arm_vmaxvq_p_u32 (__a
, __b
, __p
);
24920 __extension__
extern __inline uint32x4_t
24921 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24922 __arm_vdupq_m (uint32x4_t __inactive
, uint32_t __a
, mve_pred16_t __p
)
24924 return __arm_vdupq_m_n_u32 (__inactive
, __a
, __p
);
24927 __extension__
extern __inline mve_pred16_t
24928 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24929 __arm_vcmpneq_m (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
24931 return __arm_vcmpneq_m_u32 (__a
, __b
, __p
);
24934 __extension__
extern __inline mve_pred16_t
24935 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24936 __arm_vcmpneq_m (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
24938 return __arm_vcmpneq_m_n_u32 (__a
, __b
, __p
);
24941 __extension__
extern __inline mve_pred16_t
24942 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24943 __arm_vcmphiq_m (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
24945 return __arm_vcmphiq_m_u32 (__a
, __b
, __p
);
24948 __extension__
extern __inline mve_pred16_t
24949 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24950 __arm_vcmphiq_m (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
24952 return __arm_vcmphiq_m_n_u32 (__a
, __b
, __p
);
24955 __extension__
extern __inline mve_pred16_t
24956 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24957 __arm_vcmpeqq_m (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
24959 return __arm_vcmpeqq_m_u32 (__a
, __b
, __p
);
24962 __extension__
extern __inline mve_pred16_t
24963 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24964 __arm_vcmpeqq_m (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
24966 return __arm_vcmpeqq_m_n_u32 (__a
, __b
, __p
);
24969 __extension__
extern __inline mve_pred16_t
24970 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24971 __arm_vcmpcsq_m (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
24973 return __arm_vcmpcsq_m_u32 (__a
, __b
, __p
);
24976 __extension__
extern __inline mve_pred16_t
24977 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24978 __arm_vcmpcsq_m (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
24980 return __arm_vcmpcsq_m_n_u32 (__a
, __b
, __p
);
24983 __extension__
extern __inline uint32x4_t
24984 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24985 __arm_vclzq_m (uint32x4_t __inactive
, uint32x4_t __a
, mve_pred16_t __p
)
24987 return __arm_vclzq_m_u32 (__inactive
, __a
, __p
);
24990 __extension__
extern __inline
uint32_t
24991 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24992 __arm_vaddvaq_p (uint32_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
24994 return __arm_vaddvaq_p_u32 (__a
, __b
, __p
);
24997 __extension__
extern __inline uint32x4_t
24998 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
24999 __arm_vsriq (uint32x4_t __a
, uint32x4_t __b
, const int __imm
)
25001 return __arm_vsriq_n_u32 (__a
, __b
, __imm
);
25004 __extension__
extern __inline uint32x4_t
25005 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25006 __arm_vsliq (uint32x4_t __a
, uint32x4_t __b
, const int __imm
)
25008 return __arm_vsliq_n_u32 (__a
, __b
, __imm
);
25011 __extension__
extern __inline uint32x4_t
25012 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25013 __arm_vshlq_m_r (uint32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25015 return __arm_vshlq_m_r_u32 (__a
, __b
, __p
);
25018 __extension__
extern __inline uint32x4_t
25019 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25020 __arm_vrshlq_m_n (uint32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25022 return __arm_vrshlq_m_n_u32 (__a
, __b
, __p
);
25025 __extension__
extern __inline uint32x4_t
25026 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25027 __arm_vqshlq_m_r (uint32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25029 return __arm_vqshlq_m_r_u32 (__a
, __b
, __p
);
25032 __extension__
extern __inline uint32x4_t
25033 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25034 __arm_vqrshlq_m_n (uint32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25036 return __arm_vqrshlq_m_n_u32 (__a
, __b
, __p
);
25039 __extension__
extern __inline
uint32_t
25040 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25041 __arm_vminavq_p (uint32_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25043 return __arm_vminavq_p_s32 (__a
, __b
, __p
);
25046 __extension__
extern __inline uint32x4_t
25047 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25048 __arm_vminaq_m (uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25050 return __arm_vminaq_m_s32 (__a
, __b
, __p
);
25053 __extension__
extern __inline
uint32_t
25054 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25055 __arm_vmaxavq_p (uint32_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25057 return __arm_vmaxavq_p_s32 (__a
, __b
, __p
);
25060 __extension__
extern __inline uint32x4_t
25061 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25062 __arm_vmaxaq_m (uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25064 return __arm_vmaxaq_m_s32 (__a
, __b
, __p
);
25067 __extension__
extern __inline mve_pred16_t
25068 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25069 __arm_vcmpneq_m (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25071 return __arm_vcmpneq_m_s32 (__a
, __b
, __p
);
25074 __extension__
extern __inline mve_pred16_t
25075 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25076 __arm_vcmpneq_m (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25078 return __arm_vcmpneq_m_n_s32 (__a
, __b
, __p
);
25081 __extension__
extern __inline mve_pred16_t
25082 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25083 __arm_vcmpltq_m (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25085 return __arm_vcmpltq_m_s32 (__a
, __b
, __p
);
25088 __extension__
extern __inline mve_pred16_t
25089 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25090 __arm_vcmpltq_m (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25092 return __arm_vcmpltq_m_n_s32 (__a
, __b
, __p
);
25095 __extension__
extern __inline mve_pred16_t
25096 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25097 __arm_vcmpleq_m (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25099 return __arm_vcmpleq_m_s32 (__a
, __b
, __p
);
25102 __extension__
extern __inline mve_pred16_t
25103 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25104 __arm_vcmpleq_m (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25106 return __arm_vcmpleq_m_n_s32 (__a
, __b
, __p
);
25109 __extension__
extern __inline mve_pred16_t
25110 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25111 __arm_vcmpgtq_m (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25113 return __arm_vcmpgtq_m_s32 (__a
, __b
, __p
);
25116 __extension__
extern __inline mve_pred16_t
25117 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25118 __arm_vcmpgtq_m (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25120 return __arm_vcmpgtq_m_n_s32 (__a
, __b
, __p
);
25123 __extension__
extern __inline mve_pred16_t
25124 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25125 __arm_vcmpgeq_m (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25127 return __arm_vcmpgeq_m_s32 (__a
, __b
, __p
);
25130 __extension__
extern __inline mve_pred16_t
25131 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25132 __arm_vcmpgeq_m (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25134 return __arm_vcmpgeq_m_n_s32 (__a
, __b
, __p
);
25137 __extension__
extern __inline mve_pred16_t
25138 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25139 __arm_vcmpeqq_m (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25141 return __arm_vcmpeqq_m_s32 (__a
, __b
, __p
);
25144 __extension__
extern __inline mve_pred16_t
25145 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25146 __arm_vcmpeqq_m (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25148 return __arm_vcmpeqq_m_n_s32 (__a
, __b
, __p
);
25151 __extension__
extern __inline int32x4_t
25152 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25153 __arm_vshlq_m_r (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25155 return __arm_vshlq_m_r_s32 (__a
, __b
, __p
);
25158 __extension__
extern __inline int32x4_t
25159 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25160 __arm_vrshlq_m_n (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25162 return __arm_vrshlq_m_n_s32 (__a
, __b
, __p
);
25165 __extension__
extern __inline int32x4_t
25166 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25167 __arm_vrev64q_m (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
25169 return __arm_vrev64q_m_s32 (__inactive
, __a
, __p
);
25172 __extension__
extern __inline int32x4_t
25173 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25174 __arm_vqshlq_m_r (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25176 return __arm_vqshlq_m_r_s32 (__a
, __b
, __p
);
25179 __extension__
extern __inline int32x4_t
25180 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25181 __arm_vqrshlq_m_n (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
25183 return __arm_vqrshlq_m_n_s32 (__a
, __b
, __p
);
25186 __extension__
extern __inline int32x4_t
25187 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25188 __arm_vqnegq_m (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
25190 return __arm_vqnegq_m_s32 (__inactive
, __a
, __p
);
25193 __extension__
extern __inline int32x4_t
25194 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25195 __arm_vqabsq_m (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
25197 return __arm_vqabsq_m_s32 (__inactive
, __a
, __p
);
25200 __extension__
extern __inline int32x4_t
25201 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25202 __arm_vnegq_m (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
25204 return __arm_vnegq_m_s32 (__inactive
, __a
, __p
);
25207 __extension__
extern __inline int32x4_t
25208 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25209 __arm_vmvnq_m (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
25211 return __arm_vmvnq_m_s32 (__inactive
, __a
, __p
);
25214 __extension__
extern __inline
int32_t
25215 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25216 __arm_vmlsdavxq_p (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25218 return __arm_vmlsdavxq_p_s32 (__a
, __b
, __p
);
25221 __extension__
extern __inline
int32_t
25222 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25223 __arm_vmlsdavq_p (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25225 return __arm_vmlsdavq_p_s32 (__a
, __b
, __p
);
25228 __extension__
extern __inline
int32_t
25229 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25230 __arm_vmladavxq_p (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25232 return __arm_vmladavxq_p_s32 (__a
, __b
, __p
);
25235 __extension__
extern __inline
int32_t
25236 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25237 __arm_vmladavq_p (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25239 return __arm_vmladavq_p_s32 (__a
, __b
, __p
);
25242 __extension__
extern __inline
int32_t
25243 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25244 __arm_vminvq_p (int32_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25246 return __arm_vminvq_p_s32 (__a
, __b
, __p
);
25249 __extension__
extern __inline
int32_t
25250 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25251 __arm_vmaxvq_p (int32_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25253 return __arm_vmaxvq_p_s32 (__a
, __b
, __p
);
25256 __extension__
extern __inline int32x4_t
25257 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25258 __arm_vdupq_m (int32x4_t __inactive
, int32_t __a
, mve_pred16_t __p
)
25260 return __arm_vdupq_m_n_s32 (__inactive
, __a
, __p
);
25263 __extension__
extern __inline int32x4_t
25264 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25265 __arm_vclzq_m (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
25267 return __arm_vclzq_m_s32 (__inactive
, __a
, __p
);
25270 __extension__
extern __inline int32x4_t
25271 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25272 __arm_vclsq_m (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
25274 return __arm_vclsq_m_s32 (__inactive
, __a
, __p
);
25277 __extension__
extern __inline
int32_t
25278 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25279 __arm_vaddvaq_p (int32_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25281 return __arm_vaddvaq_p_s32 (__a
, __b
, __p
);
25284 __extension__
extern __inline int32x4_t
25285 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25286 __arm_vabsq_m (int32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
25288 return __arm_vabsq_m_s32 (__inactive
, __a
, __p
);
25291 __extension__
extern __inline int32x4_t
25292 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25293 __arm_vqrdmlsdhxq (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
25295 return __arm_vqrdmlsdhxq_s32 (__inactive
, __a
, __b
);
25298 __extension__
extern __inline int32x4_t
25299 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25300 __arm_vqrdmlsdhq (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
25302 return __arm_vqrdmlsdhq_s32 (__inactive
, __a
, __b
);
25305 __extension__
extern __inline int32x4_t
25306 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25307 __arm_vqrdmlashq (int32x4_t __a
, int32x4_t __b
, int32_t __c
)
25309 return __arm_vqrdmlashq_n_s32 (__a
, __b
, __c
);
25312 __extension__
extern __inline int32x4_t
25313 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25314 __arm_vqdmlashq (int32x4_t __a
, int32x4_t __b
, int32_t __c
)
25316 return __arm_vqdmlashq_n_s32 (__a
, __b
, __c
);
25319 __extension__
extern __inline int32x4_t
25320 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25321 __arm_vqrdmlahq (int32x4_t __a
, int32x4_t __b
, int32_t __c
)
25323 return __arm_vqrdmlahq_n_s32 (__a
, __b
, __c
);
25326 __extension__
extern __inline int32x4_t
25327 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25328 __arm_vqrdmladhxq (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
25330 return __arm_vqrdmladhxq_s32 (__inactive
, __a
, __b
);
25333 __extension__
extern __inline int32x4_t
25334 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25335 __arm_vqrdmladhq (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
25337 return __arm_vqrdmladhq_s32 (__inactive
, __a
, __b
);
25340 __extension__
extern __inline int32x4_t
25341 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25342 __arm_vqdmlsdhxq (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
25344 return __arm_vqdmlsdhxq_s32 (__inactive
, __a
, __b
);
25347 __extension__
extern __inline int32x4_t
25348 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25349 __arm_vqdmlsdhq (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
25351 return __arm_vqdmlsdhq_s32 (__inactive
, __a
, __b
);
25354 __extension__
extern __inline int32x4_t
25355 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25356 __arm_vqdmlahq (int32x4_t __a
, int32x4_t __b
, int32_t __c
)
25358 return __arm_vqdmlahq_n_s32 (__a
, __b
, __c
);
25361 __extension__
extern __inline int32x4_t
25362 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25363 __arm_vqdmladhxq (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
25365 return __arm_vqdmladhxq_s32 (__inactive
, __a
, __b
);
25368 __extension__
extern __inline int32x4_t
25369 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25370 __arm_vqdmladhq (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
)
25372 return __arm_vqdmladhq_s32 (__inactive
, __a
, __b
);
25375 __extension__
extern __inline
int32_t
25376 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25377 __arm_vmlsdavaxq (int32_t __a
, int32x4_t __b
, int32x4_t __c
)
25379 return __arm_vmlsdavaxq_s32 (__a
, __b
, __c
);
25382 __extension__
extern __inline
int32_t
25383 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25384 __arm_vmlsdavaq (int32_t __a
, int32x4_t __b
, int32x4_t __c
)
25386 return __arm_vmlsdavaq_s32 (__a
, __b
, __c
);
25389 __extension__
extern __inline int32x4_t
25390 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25391 __arm_vmlasq (int32x4_t __a
, int32x4_t __b
, int32_t __c
)
25393 return __arm_vmlasq_n_s32 (__a
, __b
, __c
);
25396 __extension__
extern __inline int32x4_t
25397 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25398 __arm_vmlaq (int32x4_t __a
, int32x4_t __b
, int32_t __c
)
25400 return __arm_vmlaq_n_s32 (__a
, __b
, __c
);
25403 __extension__
extern __inline
int32_t
25404 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25405 __arm_vmladavaxq (int32_t __a
, int32x4_t __b
, int32x4_t __c
)
25407 return __arm_vmladavaxq_s32 (__a
, __b
, __c
);
25410 __extension__
extern __inline
int32_t
25411 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25412 __arm_vmladavaq (int32_t __a
, int32x4_t __b
, int32x4_t __c
)
25414 return __arm_vmladavaq_s32 (__a
, __b
, __c
);
25417 __extension__
extern __inline int32x4_t
25418 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25419 __arm_vsriq (int32x4_t __a
, int32x4_t __b
, const int __imm
)
25421 return __arm_vsriq_n_s32 (__a
, __b
, __imm
);
25424 __extension__
extern __inline int32x4_t
25425 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25426 __arm_vsliq (int32x4_t __a
, int32x4_t __b
, const int __imm
)
25428 return __arm_vsliq_n_s32 (__a
, __b
, __imm
);
25431 __extension__
extern __inline uint64x2_t
25432 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25433 __arm_vpselq (uint64x2_t __a
, uint64x2_t __b
, mve_pred16_t __p
)
25435 return __arm_vpselq_u64 (__a
, __b
, __p
);
25438 __extension__
extern __inline int64x2_t
25439 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25440 __arm_vpselq (int64x2_t __a
, int64x2_t __b
, mve_pred16_t __p
)
25442 return __arm_vpselq_s64 (__a
, __b
, __p
);
25445 __extension__
extern __inline
int64_t
25446 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25447 __arm_vrmlaldavhaxq (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
25449 return __arm_vrmlaldavhaxq_s32 (__a
, __b
, __c
);
25452 __extension__
extern __inline
int64_t
25453 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25454 __arm_vrmlsldavhaq (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
25456 return __arm_vrmlsldavhaq_s32 (__a
, __b
, __c
);
25459 __extension__
extern __inline
int64_t
25460 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25461 __arm_vrmlsldavhaxq (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
25463 return __arm_vrmlsldavhaxq_s32 (__a
, __b
, __c
);
25466 __extension__
extern __inline
int64_t
25467 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25468 __arm_vaddlvaq_p (int64_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25470 return __arm_vaddlvaq_p_s32 (__a
, __b
, __p
);
25473 __extension__
extern __inline int8x16_t
25474 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25475 __arm_vrev16q_m (int8x16_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
25477 return __arm_vrev16q_m_s8 (__inactive
, __a
, __p
);
25480 __extension__
extern __inline
int64_t
25481 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25482 __arm_vrmlaldavhq_p (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25484 return __arm_vrmlaldavhq_p_s32 (__a
, __b
, __p
);
25487 __extension__
extern __inline
int64_t
25488 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25489 __arm_vrmlaldavhxq_p (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25491 return __arm_vrmlaldavhxq_p_s32 (__a
, __b
, __p
);
25494 __extension__
extern __inline
int64_t
25495 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25496 __arm_vrmlsldavhq_p (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25498 return __arm_vrmlsldavhq_p_s32 (__a
, __b
, __p
);
25501 __extension__
extern __inline
int64_t
25502 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25503 __arm_vrmlsldavhxq_p (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25505 return __arm_vrmlsldavhxq_p_s32 (__a
, __b
, __p
);
25508 __extension__
extern __inline
uint64_t
25509 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25510 __arm_vaddlvaq_p (uint64_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
25512 return __arm_vaddlvaq_p_u32 (__a
, __b
, __p
);
25515 __extension__
extern __inline uint8x16_t
25516 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25517 __arm_vrev16q_m (uint8x16_t __inactive
, uint8x16_t __a
, mve_pred16_t __p
)
25519 return __arm_vrev16q_m_u8 (__inactive
, __a
, __p
);
25522 __extension__
extern __inline
uint64_t
25523 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25524 __arm_vrmlaldavhq_p (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
25526 return __arm_vrmlaldavhq_p_u32 (__a
, __b
, __p
);
25529 __extension__
extern __inline int16x8_t
25530 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25531 __arm_vmvnq_m (int16x8_t __inactive
, const int __imm
, mve_pred16_t __p
)
25533 return __arm_vmvnq_m_n_s16 (__inactive
, __imm
, __p
);
25536 __extension__
extern __inline int16x8_t
25537 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25538 __arm_vorrq_m_n (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
25540 return __arm_vorrq_m_n_s16 (__a
, __imm
, __p
);
25543 __extension__
extern __inline int8x16_t
25544 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25545 __arm_vqrshrntq (int8x16_t __a
, int16x8_t __b
, const int __imm
)
25547 return __arm_vqrshrntq_n_s16 (__a
, __b
, __imm
);
25550 __extension__
extern __inline int8x16_t
25551 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25552 __arm_vqshrnbq (int8x16_t __a
, int16x8_t __b
, const int __imm
)
25554 return __arm_vqshrnbq_n_s16 (__a
, __b
, __imm
);
25557 __extension__
extern __inline int8x16_t
25558 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25559 __arm_vqshrntq (int8x16_t __a
, int16x8_t __b
, const int __imm
)
25561 return __arm_vqshrntq_n_s16 (__a
, __b
, __imm
);
25564 __extension__
extern __inline int8x16_t
25565 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25566 __arm_vrshrnbq (int8x16_t __a
, int16x8_t __b
, const int __imm
)
25568 return __arm_vrshrnbq_n_s16 (__a
, __b
, __imm
);
25571 __extension__
extern __inline int8x16_t
25572 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25573 __arm_vrshrntq (int8x16_t __a
, int16x8_t __b
, const int __imm
)
25575 return __arm_vrshrntq_n_s16 (__a
, __b
, __imm
);
25578 __extension__
extern __inline int8x16_t
25579 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25580 __arm_vshrnbq (int8x16_t __a
, int16x8_t __b
, const int __imm
)
25582 return __arm_vshrnbq_n_s16 (__a
, __b
, __imm
);
25585 __extension__
extern __inline int8x16_t
25586 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25587 __arm_vshrntq (int8x16_t __a
, int16x8_t __b
, const int __imm
)
25589 return __arm_vshrntq_n_s16 (__a
, __b
, __imm
);
25592 __extension__
extern __inline
int64_t
25593 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25594 __arm_vmlaldavaq (int64_t __a
, int16x8_t __b
, int16x8_t __c
)
25596 return __arm_vmlaldavaq_s16 (__a
, __b
, __c
);
25599 __extension__
extern __inline
int64_t
25600 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25601 __arm_vmlaldavaxq (int64_t __a
, int16x8_t __b
, int16x8_t __c
)
25603 return __arm_vmlaldavaxq_s16 (__a
, __b
, __c
);
25606 __extension__
extern __inline
int64_t
25607 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25608 __arm_vmlsldavaq (int64_t __a
, int16x8_t __b
, int16x8_t __c
)
25610 return __arm_vmlsldavaq_s16 (__a
, __b
, __c
);
25613 __extension__
extern __inline
int64_t
25614 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25615 __arm_vmlsldavaxq (int64_t __a
, int16x8_t __b
, int16x8_t __c
)
25617 return __arm_vmlsldavaxq_s16 (__a
, __b
, __c
);
25620 __extension__
extern __inline
int64_t
25621 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25622 __arm_vmlaldavq_p (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
25624 return __arm_vmlaldavq_p_s16 (__a
, __b
, __p
);
25627 __extension__
extern __inline
int64_t
25628 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25629 __arm_vmlaldavxq_p (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
25631 return __arm_vmlaldavxq_p_s16 (__a
, __b
, __p
);
25634 __extension__
extern __inline
int64_t
25635 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25636 __arm_vmlsldavq_p (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
25638 return __arm_vmlsldavq_p_s16 (__a
, __b
, __p
);
25641 __extension__
extern __inline
int64_t
25642 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25643 __arm_vmlsldavxq_p (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
25645 return __arm_vmlsldavxq_p_s16 (__a
, __b
, __p
);
25648 __extension__
extern __inline int16x8_t
25649 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25650 __arm_vmovlbq_m (int16x8_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
25652 return __arm_vmovlbq_m_s8 (__inactive
, __a
, __p
);
25655 __extension__
extern __inline int16x8_t
25656 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25657 __arm_vmovltq_m (int16x8_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
25659 return __arm_vmovltq_m_s8 (__inactive
, __a
, __p
);
25662 __extension__
extern __inline int8x16_t
25663 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25664 __arm_vmovnbq_m (int8x16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
25666 return __arm_vmovnbq_m_s16 (__a
, __b
, __p
);
25669 __extension__
extern __inline int8x16_t
25670 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25671 __arm_vmovntq_m (int8x16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
25673 return __arm_vmovntq_m_s16 (__a
, __b
, __p
);
25676 __extension__
extern __inline int8x16_t
25677 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25678 __arm_vqmovnbq_m (int8x16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
25680 return __arm_vqmovnbq_m_s16 (__a
, __b
, __p
);
25683 __extension__
extern __inline int8x16_t
25684 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25685 __arm_vqmovntq_m (int8x16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
25687 return __arm_vqmovntq_m_s16 (__a
, __b
, __p
);
25690 __extension__
extern __inline int8x16_t
25691 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25692 __arm_vrev32q_m (int8x16_t __inactive
, int8x16_t __a
, mve_pred16_t __p
)
25694 return __arm_vrev32q_m_s8 (__inactive
, __a
, __p
);
25697 __extension__
extern __inline uint16x8_t
25698 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25699 __arm_vmvnq_m (uint16x8_t __inactive
, const int __imm
, mve_pred16_t __p
)
25701 return __arm_vmvnq_m_n_u16 (__inactive
, __imm
, __p
);
25704 __extension__
extern __inline uint16x8_t
25705 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25706 __arm_vorrq_m_n (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
25708 return __arm_vorrq_m_n_u16 (__a
, __imm
, __p
);
25711 __extension__
extern __inline uint8x16_t
25712 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25713 __arm_vqrshruntq (uint8x16_t __a
, int16x8_t __b
, const int __imm
)
25715 return __arm_vqrshruntq_n_s16 (__a
, __b
, __imm
);
25718 __extension__
extern __inline uint8x16_t
25719 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25720 __arm_vqshrunbq (uint8x16_t __a
, int16x8_t __b
, const int __imm
)
25722 return __arm_vqshrunbq_n_s16 (__a
, __b
, __imm
);
25725 __extension__
extern __inline uint8x16_t
25726 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25727 __arm_vqshruntq (uint8x16_t __a
, int16x8_t __b
, const int __imm
)
25729 return __arm_vqshruntq_n_s16 (__a
, __b
, __imm
);
25732 __extension__
extern __inline uint8x16_t
25733 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25734 __arm_vqmovunbq_m (uint8x16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
25736 return __arm_vqmovunbq_m_s16 (__a
, __b
, __p
);
25739 __extension__
extern __inline uint8x16_t
25740 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25741 __arm_vqmovuntq_m (uint8x16_t __a
, int16x8_t __b
, mve_pred16_t __p
)
25743 return __arm_vqmovuntq_m_s16 (__a
, __b
, __p
);
25746 __extension__
extern __inline uint8x16_t
25747 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25748 __arm_vqrshrntq (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
25750 return __arm_vqrshrntq_n_u16 (__a
, __b
, __imm
);
25753 __extension__
extern __inline uint8x16_t
25754 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25755 __arm_vqshrnbq (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
25757 return __arm_vqshrnbq_n_u16 (__a
, __b
, __imm
);
25760 __extension__
extern __inline uint8x16_t
25761 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25762 __arm_vqshrntq (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
25764 return __arm_vqshrntq_n_u16 (__a
, __b
, __imm
);
25767 __extension__
extern __inline uint8x16_t
25768 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25769 __arm_vrshrnbq (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
25771 return __arm_vrshrnbq_n_u16 (__a
, __b
, __imm
);
25774 __extension__
extern __inline uint8x16_t
25775 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25776 __arm_vrshrntq (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
25778 return __arm_vrshrntq_n_u16 (__a
, __b
, __imm
);
25781 __extension__
extern __inline uint8x16_t
25782 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25783 __arm_vshrnbq (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
25785 return __arm_vshrnbq_n_u16 (__a
, __b
, __imm
);
25788 __extension__
extern __inline uint8x16_t
25789 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25790 __arm_vshrntq (uint8x16_t __a
, uint16x8_t __b
, const int __imm
)
25792 return __arm_vshrntq_n_u16 (__a
, __b
, __imm
);
25795 __extension__
extern __inline
uint64_t
25796 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25797 __arm_vmlaldavaq (uint64_t __a
, uint16x8_t __b
, uint16x8_t __c
)
25799 return __arm_vmlaldavaq_u16 (__a
, __b
, __c
);
25802 __extension__
extern __inline
uint64_t
25803 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25804 __arm_vmlaldavq_p (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
25806 return __arm_vmlaldavq_p_u16 (__a
, __b
, __p
);
25809 __extension__
extern __inline uint16x8_t
25810 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25811 __arm_vmovlbq_m (uint16x8_t __inactive
, uint8x16_t __a
, mve_pred16_t __p
)
25813 return __arm_vmovlbq_m_u8 (__inactive
, __a
, __p
);
25816 __extension__
extern __inline uint16x8_t
25817 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25818 __arm_vmovltq_m (uint16x8_t __inactive
, uint8x16_t __a
, mve_pred16_t __p
)
25820 return __arm_vmovltq_m_u8 (__inactive
, __a
, __p
);
25823 __extension__
extern __inline uint8x16_t
25824 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25825 __arm_vmovnbq_m (uint8x16_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
25827 return __arm_vmovnbq_m_u16 (__a
, __b
, __p
);
25830 __extension__
extern __inline uint8x16_t
25831 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25832 __arm_vmovntq_m (uint8x16_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
25834 return __arm_vmovntq_m_u16 (__a
, __b
, __p
);
25837 __extension__
extern __inline uint8x16_t
25838 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25839 __arm_vqmovnbq_m (uint8x16_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
25841 return __arm_vqmovnbq_m_u16 (__a
, __b
, __p
);
25844 __extension__
extern __inline uint8x16_t
25845 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25846 __arm_vqmovntq_m (uint8x16_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
25848 return __arm_vqmovntq_m_u16 (__a
, __b
, __p
);
25851 __extension__
extern __inline uint8x16_t
25852 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25853 __arm_vrev32q_m (uint8x16_t __inactive
, uint8x16_t __a
, mve_pred16_t __p
)
25855 return __arm_vrev32q_m_u8 (__inactive
, __a
, __p
);
25858 __extension__
extern __inline int32x4_t
25859 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25860 __arm_vmvnq_m (int32x4_t __inactive
, const int __imm
, mve_pred16_t __p
)
25862 return __arm_vmvnq_m_n_s32 (__inactive
, __imm
, __p
);
25865 __extension__
extern __inline int32x4_t
25866 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25867 __arm_vorrq_m_n (int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
25869 return __arm_vorrq_m_n_s32 (__a
, __imm
, __p
);
25872 __extension__
extern __inline int16x8_t
25873 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25874 __arm_vqrshrntq (int16x8_t __a
, int32x4_t __b
, const int __imm
)
25876 return __arm_vqrshrntq_n_s32 (__a
, __b
, __imm
);
25879 __extension__
extern __inline int16x8_t
25880 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25881 __arm_vqshrnbq (int16x8_t __a
, int32x4_t __b
, const int __imm
)
25883 return __arm_vqshrnbq_n_s32 (__a
, __b
, __imm
);
25886 __extension__
extern __inline int16x8_t
25887 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25888 __arm_vqshrntq (int16x8_t __a
, int32x4_t __b
, const int __imm
)
25890 return __arm_vqshrntq_n_s32 (__a
, __b
, __imm
);
25893 __extension__
extern __inline int16x8_t
25894 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25895 __arm_vrshrnbq (int16x8_t __a
, int32x4_t __b
, const int __imm
)
25897 return __arm_vrshrnbq_n_s32 (__a
, __b
, __imm
);
25900 __extension__
extern __inline int16x8_t
25901 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25902 __arm_vrshrntq (int16x8_t __a
, int32x4_t __b
, const int __imm
)
25904 return __arm_vrshrntq_n_s32 (__a
, __b
, __imm
);
25907 __extension__
extern __inline int16x8_t
25908 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25909 __arm_vshrnbq (int16x8_t __a
, int32x4_t __b
, const int __imm
)
25911 return __arm_vshrnbq_n_s32 (__a
, __b
, __imm
);
25914 __extension__
extern __inline int16x8_t
25915 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25916 __arm_vshrntq (int16x8_t __a
, int32x4_t __b
, const int __imm
)
25918 return __arm_vshrntq_n_s32 (__a
, __b
, __imm
);
25921 __extension__
extern __inline
int64_t
25922 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25923 __arm_vmlaldavaq (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
25925 return __arm_vmlaldavaq_s32 (__a
, __b
, __c
);
25928 __extension__
extern __inline
int64_t
25929 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25930 __arm_vmlaldavaxq (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
25932 return __arm_vmlaldavaxq_s32 (__a
, __b
, __c
);
25935 __extension__
extern __inline
int64_t
25936 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25937 __arm_vmlsldavaq (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
25939 return __arm_vmlsldavaq_s32 (__a
, __b
, __c
);
25942 __extension__
extern __inline
int64_t
25943 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25944 __arm_vmlsldavaxq (int64_t __a
, int32x4_t __b
, int32x4_t __c
)
25946 return __arm_vmlsldavaxq_s32 (__a
, __b
, __c
);
25949 __extension__
extern __inline
int64_t
25950 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25951 __arm_vmlaldavq_p (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25953 return __arm_vmlaldavq_p_s32 (__a
, __b
, __p
);
25956 __extension__
extern __inline
int64_t
25957 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25958 __arm_vmlaldavxq_p (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25960 return __arm_vmlaldavxq_p_s32 (__a
, __b
, __p
);
25963 __extension__
extern __inline
int64_t
25964 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25965 __arm_vmlsldavq_p (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25967 return __arm_vmlsldavq_p_s32 (__a
, __b
, __p
);
25970 __extension__
extern __inline
int64_t
25971 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25972 __arm_vmlsldavxq_p (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25974 return __arm_vmlsldavxq_p_s32 (__a
, __b
, __p
);
25977 __extension__
extern __inline int32x4_t
25978 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25979 __arm_vmovlbq_m (int32x4_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
25981 return __arm_vmovlbq_m_s16 (__inactive
, __a
, __p
);
25984 __extension__
extern __inline int32x4_t
25985 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25986 __arm_vmovltq_m (int32x4_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
25988 return __arm_vmovltq_m_s16 (__inactive
, __a
, __p
);
25991 __extension__
extern __inline int16x8_t
25992 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
25993 __arm_vmovnbq_m (int16x8_t __a
, int32x4_t __b
, mve_pred16_t __p
)
25995 return __arm_vmovnbq_m_s32 (__a
, __b
, __p
);
25998 __extension__
extern __inline int16x8_t
25999 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26000 __arm_vmovntq_m (int16x8_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26002 return __arm_vmovntq_m_s32 (__a
, __b
, __p
);
26005 __extension__
extern __inline int16x8_t
26006 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26007 __arm_vqmovnbq_m (int16x8_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26009 return __arm_vqmovnbq_m_s32 (__a
, __b
, __p
);
26012 __extension__
extern __inline int16x8_t
26013 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26014 __arm_vqmovntq_m (int16x8_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26016 return __arm_vqmovntq_m_s32 (__a
, __b
, __p
);
26019 __extension__
extern __inline int16x8_t
26020 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26021 __arm_vrev32q_m (int16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
26023 return __arm_vrev32q_m_s16 (__inactive
, __a
, __p
);
26026 __extension__
extern __inline uint32x4_t
26027 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26028 __arm_vmvnq_m (uint32x4_t __inactive
, const int __imm
, mve_pred16_t __p
)
26030 return __arm_vmvnq_m_n_u32 (__inactive
, __imm
, __p
);
26033 __extension__
extern __inline uint32x4_t
26034 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26035 __arm_vorrq_m_n (uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
26037 return __arm_vorrq_m_n_u32 (__a
, __imm
, __p
);
26040 __extension__
extern __inline uint16x8_t
26041 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26042 __arm_vqrshruntq (uint16x8_t __a
, int32x4_t __b
, const int __imm
)
26044 return __arm_vqrshruntq_n_s32 (__a
, __b
, __imm
);
26047 __extension__
extern __inline uint16x8_t
26048 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26049 __arm_vqshrunbq (uint16x8_t __a
, int32x4_t __b
, const int __imm
)
26051 return __arm_vqshrunbq_n_s32 (__a
, __b
, __imm
);
26054 __extension__
extern __inline uint16x8_t
26055 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26056 __arm_vqshruntq (uint16x8_t __a
, int32x4_t __b
, const int __imm
)
26058 return __arm_vqshruntq_n_s32 (__a
, __b
, __imm
);
26061 __extension__
extern __inline uint16x8_t
26062 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26063 __arm_vqmovunbq_m (uint16x8_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26065 return __arm_vqmovunbq_m_s32 (__a
, __b
, __p
);
26068 __extension__
extern __inline uint16x8_t
26069 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26070 __arm_vqmovuntq_m (uint16x8_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26072 return __arm_vqmovuntq_m_s32 (__a
, __b
, __p
);
26075 __extension__
extern __inline uint16x8_t
26076 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26077 __arm_vqrshrntq (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
26079 return __arm_vqrshrntq_n_u32 (__a
, __b
, __imm
);
26082 __extension__
extern __inline uint16x8_t
26083 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26084 __arm_vqshrnbq (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
26086 return __arm_vqshrnbq_n_u32 (__a
, __b
, __imm
);
26089 __extension__
extern __inline uint16x8_t
26090 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26091 __arm_vqshrntq (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
26093 return __arm_vqshrntq_n_u32 (__a
, __b
, __imm
);
26096 __extension__
extern __inline uint16x8_t
26097 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26098 __arm_vrshrnbq (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
26100 return __arm_vrshrnbq_n_u32 (__a
, __b
, __imm
);
26103 __extension__
extern __inline uint16x8_t
26104 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26105 __arm_vrshrntq (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
26107 return __arm_vrshrntq_n_u32 (__a
, __b
, __imm
);
26110 __extension__
extern __inline uint16x8_t
26111 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26112 __arm_vshrnbq (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
26114 return __arm_vshrnbq_n_u32 (__a
, __b
, __imm
);
26117 __extension__
extern __inline uint16x8_t
26118 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26119 __arm_vshrntq (uint16x8_t __a
, uint32x4_t __b
, const int __imm
)
26121 return __arm_vshrntq_n_u32 (__a
, __b
, __imm
);
26124 __extension__
extern __inline
uint64_t
26125 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26126 __arm_vmlaldavaq (uint64_t __a
, uint32x4_t __b
, uint32x4_t __c
)
26128 return __arm_vmlaldavaq_u32 (__a
, __b
, __c
);
26131 __extension__
extern __inline
uint64_t
26132 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26133 __arm_vmlaldavq_p (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26135 return __arm_vmlaldavq_p_u32 (__a
, __b
, __p
);
26138 __extension__
extern __inline uint32x4_t
26139 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26140 __arm_vmovlbq_m (uint32x4_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
26142 return __arm_vmovlbq_m_u16 (__inactive
, __a
, __p
);
26145 __extension__
extern __inline uint32x4_t
26146 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26147 __arm_vmovltq_m (uint32x4_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
26149 return __arm_vmovltq_m_u16 (__inactive
, __a
, __p
);
26152 __extension__
extern __inline uint16x8_t
26153 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26154 __arm_vmovnbq_m (uint16x8_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26156 return __arm_vmovnbq_m_u32 (__a
, __b
, __p
);
26159 __extension__
extern __inline uint16x8_t
26160 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26161 __arm_vmovntq_m (uint16x8_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26163 return __arm_vmovntq_m_u32 (__a
, __b
, __p
);
26166 __extension__
extern __inline uint16x8_t
26167 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26168 __arm_vqmovnbq_m (uint16x8_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26170 return __arm_vqmovnbq_m_u32 (__a
, __b
, __p
);
26173 __extension__
extern __inline uint16x8_t
26174 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26175 __arm_vqmovntq_m (uint16x8_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26177 return __arm_vqmovntq_m_u32 (__a
, __b
, __p
);
26180 __extension__
extern __inline uint16x8_t
26181 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26182 __arm_vrev32q_m (uint16x8_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
26184 return __arm_vrev32q_m_u16 (__inactive
, __a
, __p
);
26187 __extension__
extern __inline int8x16_t
26188 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26189 __arm_vsriq_m (int8x16_t __a
, int8x16_t __b
, const int __imm
, mve_pred16_t __p
)
26191 return __arm_vsriq_m_n_s8 (__a
, __b
, __imm
, __p
);
26194 __extension__
extern __inline int8x16_t
26195 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26196 __arm_vsubq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26198 return __arm_vsubq_m_s8 (__inactive
, __a
, __b
, __p
);
26201 __extension__
extern __inline uint8x16_t
26202 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26203 __arm_vqshluq_m (uint8x16_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
26205 return __arm_vqshluq_m_n_s8 (__inactive
, __a
, __imm
, __p
);
26208 __extension__
extern __inline
uint32_t
26209 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26210 __arm_vabavq_p (uint32_t __a
, int8x16_t __b
, int8x16_t __c
, mve_pred16_t __p
)
26212 return __arm_vabavq_p_s8 (__a
, __b
, __c
, __p
);
26215 __extension__
extern __inline uint8x16_t
26216 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26217 __arm_vsriq_m (uint8x16_t __a
, uint8x16_t __b
, const int __imm
, mve_pred16_t __p
)
26219 return __arm_vsriq_m_n_u8 (__a
, __b
, __imm
, __p
);
26222 __extension__
extern __inline uint8x16_t
26223 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26224 __arm_vshlq_m (uint8x16_t __inactive
, uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26226 return __arm_vshlq_m_u8 (__inactive
, __a
, __b
, __p
);
26229 __extension__
extern __inline uint8x16_t
26230 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26231 __arm_vsubq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
26233 return __arm_vsubq_m_u8 (__inactive
, __a
, __b
, __p
);
26236 __extension__
extern __inline
uint32_t
26237 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26238 __arm_vabavq_p (uint32_t __a
, uint8x16_t __b
, uint8x16_t __c
, mve_pred16_t __p
)
26240 return __arm_vabavq_p_u8 (__a
, __b
, __c
, __p
);
26243 __extension__
extern __inline int8x16_t
26244 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26245 __arm_vshlq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26247 return __arm_vshlq_m_s8 (__inactive
, __a
, __b
, __p
);
26250 __extension__
extern __inline int16x8_t
26251 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26252 __arm_vsriq_m (int16x8_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
26254 return __arm_vsriq_m_n_s16 (__a
, __b
, __imm
, __p
);
26257 __extension__
extern __inline int16x8_t
26258 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26259 __arm_vsubq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26261 return __arm_vsubq_m_s16 (__inactive
, __a
, __b
, __p
);
26264 __extension__
extern __inline uint16x8_t
26265 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26266 __arm_vqshluq_m (uint16x8_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
26268 return __arm_vqshluq_m_n_s16 (__inactive
, __a
, __imm
, __p
);
26271 __extension__
extern __inline
uint32_t
26272 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26273 __arm_vabavq_p (uint32_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
26275 return __arm_vabavq_p_s16 (__a
, __b
, __c
, __p
);
26278 __extension__
extern __inline uint16x8_t
26279 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26280 __arm_vsriq_m (uint16x8_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
26282 return __arm_vsriq_m_n_u16 (__a
, __b
, __imm
, __p
);
26285 __extension__
extern __inline uint16x8_t
26286 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26287 __arm_vshlq_m (uint16x8_t __inactive
, uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26289 return __arm_vshlq_m_u16 (__inactive
, __a
, __b
, __p
);
26292 __extension__
extern __inline uint16x8_t
26293 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26294 __arm_vsubq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
26296 return __arm_vsubq_m_u16 (__inactive
, __a
, __b
, __p
);
26299 __extension__
extern __inline
uint32_t
26300 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26301 __arm_vabavq_p (uint32_t __a
, uint16x8_t __b
, uint16x8_t __c
, mve_pred16_t __p
)
26303 return __arm_vabavq_p_u16 (__a
, __b
, __c
, __p
);
26306 __extension__
extern __inline int16x8_t
26307 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26308 __arm_vshlq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26310 return __arm_vshlq_m_s16 (__inactive
, __a
, __b
, __p
);
26313 __extension__
extern __inline int32x4_t
26314 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26315 __arm_vsriq_m (int32x4_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
26317 return __arm_vsriq_m_n_s32 (__a
, __b
, __imm
, __p
);
26320 __extension__
extern __inline int32x4_t
26321 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26322 __arm_vsubq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26324 return __arm_vsubq_m_s32 (__inactive
, __a
, __b
, __p
);
26327 __extension__
extern __inline uint32x4_t
26328 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26329 __arm_vqshluq_m (uint32x4_t __inactive
, int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
26331 return __arm_vqshluq_m_n_s32 (__inactive
, __a
, __imm
, __p
);
26334 __extension__
extern __inline
uint32_t
26335 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26336 __arm_vabavq_p (uint32_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
26338 return __arm_vabavq_p_s32 (__a
, __b
, __c
, __p
);
26341 __extension__
extern __inline uint32x4_t
26342 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26343 __arm_vsriq_m (uint32x4_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
26345 return __arm_vsriq_m_n_u32 (__a
, __b
, __imm
, __p
);
26348 __extension__
extern __inline uint32x4_t
26349 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26350 __arm_vshlq_m (uint32x4_t __inactive
, uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26352 return __arm_vshlq_m_u32 (__inactive
, __a
, __b
, __p
);
26355 __extension__
extern __inline uint32x4_t
26356 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26357 __arm_vsubq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26359 return __arm_vsubq_m_u32 (__inactive
, __a
, __b
, __p
);
26362 __extension__
extern __inline
uint32_t
26363 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26364 __arm_vabavq_p (uint32_t __a
, uint32x4_t __b
, uint32x4_t __c
, mve_pred16_t __p
)
26366 return __arm_vabavq_p_u32 (__a
, __b
, __c
, __p
);
26369 __extension__
extern __inline int32x4_t
26370 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26371 __arm_vshlq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26373 return __arm_vshlq_m_s32 (__inactive
, __a
, __b
, __p
);
26376 __extension__
extern __inline int8x16_t
26377 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26378 __arm_vabdq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26380 return __arm_vabdq_m_s8 (__inactive
, __a
, __b
, __p
);
26383 __extension__
extern __inline int32x4_t
26384 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26385 __arm_vabdq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26387 return __arm_vabdq_m_s32 (__inactive
, __a
, __b
, __p
);
26390 __extension__
extern __inline int16x8_t
26391 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26392 __arm_vabdq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26394 return __arm_vabdq_m_s16 (__inactive
, __a
, __b
, __p
);
26397 __extension__
extern __inline uint8x16_t
26398 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26399 __arm_vabdq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
26401 return __arm_vabdq_m_u8 (__inactive
, __a
, __b
, __p
);
26404 __extension__
extern __inline uint32x4_t
26405 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26406 __arm_vabdq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26408 return __arm_vabdq_m_u32 (__inactive
, __a
, __b
, __p
);
26411 __extension__
extern __inline uint16x8_t
26412 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26413 __arm_vabdq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
26415 return __arm_vabdq_m_u16 (__inactive
, __a
, __b
, __p
);
26418 __extension__
extern __inline int8x16_t
26419 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26420 __arm_vaddq_m (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
26422 return __arm_vaddq_m_n_s8 (__inactive
, __a
, __b
, __p
);
26425 __extension__
extern __inline int32x4_t
26426 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26427 __arm_vaddq_m (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
26429 return __arm_vaddq_m_n_s32 (__inactive
, __a
, __b
, __p
);
26432 __extension__
extern __inline int16x8_t
26433 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26434 __arm_vaddq_m (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
26436 return __arm_vaddq_m_n_s16 (__inactive
, __a
, __b
, __p
);
26439 __extension__
extern __inline uint8x16_t
26440 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26441 __arm_vaddq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
26443 return __arm_vaddq_m_n_u8 (__inactive
, __a
, __b
, __p
);
26446 __extension__
extern __inline uint32x4_t
26447 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26448 __arm_vaddq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
26450 return __arm_vaddq_m_n_u32 (__inactive
, __a
, __b
, __p
);
26453 __extension__
extern __inline uint16x8_t
26454 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26455 __arm_vaddq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
26457 return __arm_vaddq_m_n_u16 (__inactive
, __a
, __b
, __p
);
26460 __extension__
extern __inline int8x16_t
26461 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26462 __arm_vaddq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26464 return __arm_vaddq_m_s8 (__inactive
, __a
, __b
, __p
);
26467 __extension__
extern __inline int32x4_t
26468 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26469 __arm_vaddq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26471 return __arm_vaddq_m_s32 (__inactive
, __a
, __b
, __p
);
26474 __extension__
extern __inline int16x8_t
26475 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26476 __arm_vaddq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26478 return __arm_vaddq_m_s16 (__inactive
, __a
, __b
, __p
);
26481 __extension__
extern __inline uint8x16_t
26482 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26483 __arm_vaddq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
26485 return __arm_vaddq_m_u8 (__inactive
, __a
, __b
, __p
);
26488 __extension__
extern __inline uint32x4_t
26489 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26490 __arm_vaddq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26492 return __arm_vaddq_m_u32 (__inactive
, __a
, __b
, __p
);
26495 __extension__
extern __inline uint16x8_t
26496 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26497 __arm_vaddq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
26499 return __arm_vaddq_m_u16 (__inactive
, __a
, __b
, __p
);
26502 __extension__
extern __inline int8x16_t
26503 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26504 __arm_vandq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26506 return __arm_vandq_m_s8 (__inactive
, __a
, __b
, __p
);
26509 __extension__
extern __inline int32x4_t
26510 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26511 __arm_vandq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26513 return __arm_vandq_m_s32 (__inactive
, __a
, __b
, __p
);
26516 __extension__
extern __inline int16x8_t
26517 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26518 __arm_vandq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26520 return __arm_vandq_m_s16 (__inactive
, __a
, __b
, __p
);
26523 __extension__
extern __inline uint8x16_t
26524 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26525 __arm_vandq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
26527 return __arm_vandq_m_u8 (__inactive
, __a
, __b
, __p
);
26530 __extension__
extern __inline uint32x4_t
26531 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26532 __arm_vandq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26534 return __arm_vandq_m_u32 (__inactive
, __a
, __b
, __p
);
26537 __extension__
extern __inline uint16x8_t
26538 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26539 __arm_vandq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
26541 return __arm_vandq_m_u16 (__inactive
, __a
, __b
, __p
);
26544 __extension__
extern __inline int8x16_t
26545 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26546 __arm_vbicq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26548 return __arm_vbicq_m_s8 (__inactive
, __a
, __b
, __p
);
26551 __extension__
extern __inline int32x4_t
26552 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26553 __arm_vbicq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26555 return __arm_vbicq_m_s32 (__inactive
, __a
, __b
, __p
);
26558 __extension__
extern __inline int16x8_t
26559 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26560 __arm_vbicq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26562 return __arm_vbicq_m_s16 (__inactive
, __a
, __b
, __p
);
26565 __extension__
extern __inline uint8x16_t
26566 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26567 __arm_vbicq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
26569 return __arm_vbicq_m_u8 (__inactive
, __a
, __b
, __p
);
26572 __extension__
extern __inline uint32x4_t
26573 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26574 __arm_vbicq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26576 return __arm_vbicq_m_u32 (__inactive
, __a
, __b
, __p
);
26579 __extension__
extern __inline uint16x8_t
26580 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26581 __arm_vbicq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
26583 return __arm_vbicq_m_u16 (__inactive
, __a
, __b
, __p
);
26586 __extension__
extern __inline int8x16_t
26587 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26588 __arm_vbrsrq_m (int8x16_t __inactive
, int8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
26590 return __arm_vbrsrq_m_n_s8 (__inactive
, __a
, __b
, __p
);
26593 __extension__
extern __inline int32x4_t
26594 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26595 __arm_vbrsrq_m (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
26597 return __arm_vbrsrq_m_n_s32 (__inactive
, __a
, __b
, __p
);
26600 __extension__
extern __inline int16x8_t
26601 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26602 __arm_vbrsrq_m (int16x8_t __inactive
, int16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
26604 return __arm_vbrsrq_m_n_s16 (__inactive
, __a
, __b
, __p
);
26607 __extension__
extern __inline uint8x16_t
26608 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26609 __arm_vbrsrq_m (uint8x16_t __inactive
, uint8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
26611 return __arm_vbrsrq_m_n_u8 (__inactive
, __a
, __b
, __p
);
26614 __extension__
extern __inline uint32x4_t
26615 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26616 __arm_vbrsrq_m (uint32x4_t __inactive
, uint32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
26618 return __arm_vbrsrq_m_n_u32 (__inactive
, __a
, __b
, __p
);
26621 __extension__
extern __inline uint16x8_t
26622 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26623 __arm_vbrsrq_m (uint16x8_t __inactive
, uint16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
26625 return __arm_vbrsrq_m_n_u16 (__inactive
, __a
, __b
, __p
);
26628 __extension__
extern __inline int8x16_t
26629 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26630 __arm_vcaddq_rot270_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26632 return __arm_vcaddq_rot270_m_s8 (__inactive
, __a
, __b
, __p
);
26635 __extension__
extern __inline int32x4_t
26636 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26637 __arm_vcaddq_rot270_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26639 return __arm_vcaddq_rot270_m_s32 (__inactive
, __a
, __b
, __p
);
26642 __extension__
extern __inline int16x8_t
26643 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26644 __arm_vcaddq_rot270_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26646 return __arm_vcaddq_rot270_m_s16 (__inactive
, __a
, __b
, __p
);
26649 __extension__
extern __inline uint8x16_t
26650 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26651 __arm_vcaddq_rot270_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
26653 return __arm_vcaddq_rot270_m_u8 (__inactive
, __a
, __b
, __p
);
26656 __extension__
extern __inline uint32x4_t
26657 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26658 __arm_vcaddq_rot270_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26660 return __arm_vcaddq_rot270_m_u32 (__inactive
, __a
, __b
, __p
);
26663 __extension__
extern __inline uint16x8_t
26664 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26665 __arm_vcaddq_rot270_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
26667 return __arm_vcaddq_rot270_m_u16 (__inactive
, __a
, __b
, __p
);
26670 __extension__
extern __inline int8x16_t
26671 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26672 __arm_vcaddq_rot90_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26674 return __arm_vcaddq_rot90_m_s8 (__inactive
, __a
, __b
, __p
);
26677 __extension__
extern __inline int32x4_t
26678 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26679 __arm_vcaddq_rot90_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26681 return __arm_vcaddq_rot90_m_s32 (__inactive
, __a
, __b
, __p
);
26684 __extension__
extern __inline int16x8_t
26685 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26686 __arm_vcaddq_rot90_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26688 return __arm_vcaddq_rot90_m_s16 (__inactive
, __a
, __b
, __p
);
26691 __extension__
extern __inline uint8x16_t
26692 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26693 __arm_vcaddq_rot90_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
26695 return __arm_vcaddq_rot90_m_u8 (__inactive
, __a
, __b
, __p
);
26698 __extension__
extern __inline uint32x4_t
26699 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26700 __arm_vcaddq_rot90_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26702 return __arm_vcaddq_rot90_m_u32 (__inactive
, __a
, __b
, __p
);
26705 __extension__
extern __inline uint16x8_t
26706 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26707 __arm_vcaddq_rot90_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
26709 return __arm_vcaddq_rot90_m_u16 (__inactive
, __a
, __b
, __p
);
26712 __extension__
extern __inline int8x16_t
26713 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26714 __arm_veorq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26716 return __arm_veorq_m_s8 (__inactive
, __a
, __b
, __p
);
26719 __extension__
extern __inline int32x4_t
26720 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26721 __arm_veorq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26723 return __arm_veorq_m_s32 (__inactive
, __a
, __b
, __p
);
26726 __extension__
extern __inline int16x8_t
26727 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26728 __arm_veorq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26730 return __arm_veorq_m_s16 (__inactive
, __a
, __b
, __p
);
26733 __extension__
extern __inline uint8x16_t
26734 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26735 __arm_veorq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
26737 return __arm_veorq_m_u8 (__inactive
, __a
, __b
, __p
);
26740 __extension__
extern __inline uint32x4_t
26741 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26742 __arm_veorq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26744 return __arm_veorq_m_u32 (__inactive
, __a
, __b
, __p
);
26747 __extension__
extern __inline uint16x8_t
26748 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26749 __arm_veorq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
26751 return __arm_veorq_m_u16 (__inactive
, __a
, __b
, __p
);
26754 __extension__
extern __inline int8x16_t
26755 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26756 __arm_vhaddq_m (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
26758 return __arm_vhaddq_m_n_s8 (__inactive
, __a
, __b
, __p
);
26761 __extension__
extern __inline int32x4_t
26762 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26763 __arm_vhaddq_m (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
26765 return __arm_vhaddq_m_n_s32 (__inactive
, __a
, __b
, __p
);
26768 __extension__
extern __inline int16x8_t
26769 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26770 __arm_vhaddq_m (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
26772 return __arm_vhaddq_m_n_s16 (__inactive
, __a
, __b
, __p
);
26775 __extension__
extern __inline uint8x16_t
26776 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26777 __arm_vhaddq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
26779 return __arm_vhaddq_m_n_u8 (__inactive
, __a
, __b
, __p
);
26782 __extension__
extern __inline uint32x4_t
26783 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26784 __arm_vhaddq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
26786 return __arm_vhaddq_m_n_u32 (__inactive
, __a
, __b
, __p
);
26789 __extension__
extern __inline uint16x8_t
26790 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26791 __arm_vhaddq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
26793 return __arm_vhaddq_m_n_u16 (__inactive
, __a
, __b
, __p
);
26796 __extension__
extern __inline int8x16_t
26797 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26798 __arm_vhaddq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26800 return __arm_vhaddq_m_s8 (__inactive
, __a
, __b
, __p
);
26803 __extension__
extern __inline int32x4_t
26804 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26805 __arm_vhaddq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26807 return __arm_vhaddq_m_s32 (__inactive
, __a
, __b
, __p
);
26810 __extension__
extern __inline int16x8_t
26811 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26812 __arm_vhaddq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26814 return __arm_vhaddq_m_s16 (__inactive
, __a
, __b
, __p
);
26817 __extension__
extern __inline uint8x16_t
26818 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26819 __arm_vhaddq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
26821 return __arm_vhaddq_m_u8 (__inactive
, __a
, __b
, __p
);
26824 __extension__
extern __inline uint32x4_t
26825 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26826 __arm_vhaddq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26828 return __arm_vhaddq_m_u32 (__inactive
, __a
, __b
, __p
);
26831 __extension__
extern __inline uint16x8_t
26832 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26833 __arm_vhaddq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
26835 return __arm_vhaddq_m_u16 (__inactive
, __a
, __b
, __p
);
26838 __extension__
extern __inline int8x16_t
26839 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26840 __arm_vhcaddq_rot270_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26842 return __arm_vhcaddq_rot270_m_s8 (__inactive
, __a
, __b
, __p
);
26845 __extension__
extern __inline int32x4_t
26846 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26847 __arm_vhcaddq_rot270_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26849 return __arm_vhcaddq_rot270_m_s32 (__inactive
, __a
, __b
, __p
);
26852 __extension__
extern __inline int16x8_t
26853 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26854 __arm_vhcaddq_rot270_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26856 return __arm_vhcaddq_rot270_m_s16 (__inactive
, __a
, __b
, __p
);
26859 __extension__
extern __inline int8x16_t
26860 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26861 __arm_vhcaddq_rot90_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26863 return __arm_vhcaddq_rot90_m_s8 (__inactive
, __a
, __b
, __p
);
26866 __extension__
extern __inline int32x4_t
26867 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26868 __arm_vhcaddq_rot90_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26870 return __arm_vhcaddq_rot90_m_s32 (__inactive
, __a
, __b
, __p
);
26873 __extension__
extern __inline int16x8_t
26874 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26875 __arm_vhcaddq_rot90_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26877 return __arm_vhcaddq_rot90_m_s16 (__inactive
, __a
, __b
, __p
);
26880 __extension__
extern __inline int8x16_t
26881 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26882 __arm_vhsubq_m (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
26884 return __arm_vhsubq_m_n_s8 (__inactive
, __a
, __b
, __p
);
26887 __extension__
extern __inline int32x4_t
26888 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26889 __arm_vhsubq_m (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
26891 return __arm_vhsubq_m_n_s32 (__inactive
, __a
, __b
, __p
);
26894 __extension__
extern __inline int16x8_t
26895 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26896 __arm_vhsubq_m (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
26898 return __arm_vhsubq_m_n_s16 (__inactive
, __a
, __b
, __p
);
26901 __extension__
extern __inline uint8x16_t
26902 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26903 __arm_vhsubq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
26905 return __arm_vhsubq_m_n_u8 (__inactive
, __a
, __b
, __p
);
26908 __extension__
extern __inline uint32x4_t
26909 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26910 __arm_vhsubq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
26912 return __arm_vhsubq_m_n_u32 (__inactive
, __a
, __b
, __p
);
26915 __extension__
extern __inline uint16x8_t
26916 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26917 __arm_vhsubq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
26919 return __arm_vhsubq_m_n_u16 (__inactive
, __a
, __b
, __p
);
26922 __extension__
extern __inline int8x16_t
26923 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26924 __arm_vhsubq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26926 return __arm_vhsubq_m_s8 (__inactive
, __a
, __b
, __p
);
26929 __extension__
extern __inline int32x4_t
26930 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26931 __arm_vhsubq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26933 return __arm_vhsubq_m_s32 (__inactive
, __a
, __b
, __p
);
26936 __extension__
extern __inline int16x8_t
26937 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26938 __arm_vhsubq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26940 return __arm_vhsubq_m_s16 (__inactive
, __a
, __b
, __p
);
26943 __extension__
extern __inline uint8x16_t
26944 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26945 __arm_vhsubq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
26947 return __arm_vhsubq_m_u8 (__inactive
, __a
, __b
, __p
);
26950 __extension__
extern __inline uint32x4_t
26951 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26952 __arm_vhsubq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26954 return __arm_vhsubq_m_u32 (__inactive
, __a
, __b
, __p
);
26957 __extension__
extern __inline uint16x8_t
26958 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26959 __arm_vhsubq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
26961 return __arm_vhsubq_m_u16 (__inactive
, __a
, __b
, __p
);
26964 __extension__
extern __inline int8x16_t
26965 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26966 __arm_vmaxq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
26968 return __arm_vmaxq_m_s8 (__inactive
, __a
, __b
, __p
);
26971 __extension__
extern __inline int32x4_t
26972 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26973 __arm_vmaxq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
26975 return __arm_vmaxq_m_s32 (__inactive
, __a
, __b
, __p
);
26978 __extension__
extern __inline int16x8_t
26979 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26980 __arm_vmaxq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
26982 return __arm_vmaxq_m_s16 (__inactive
, __a
, __b
, __p
);
26985 __extension__
extern __inline uint8x16_t
26986 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26987 __arm_vmaxq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
26989 return __arm_vmaxq_m_u8 (__inactive
, __a
, __b
, __p
);
26992 __extension__
extern __inline uint32x4_t
26993 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
26994 __arm_vmaxq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
26996 return __arm_vmaxq_m_u32 (__inactive
, __a
, __b
, __p
);
26999 __extension__
extern __inline uint16x8_t
27000 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27001 __arm_vmaxq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
27003 return __arm_vmaxq_m_u16 (__inactive
, __a
, __b
, __p
);
27006 __extension__
extern __inline int8x16_t
27007 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27008 __arm_vminq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27010 return __arm_vminq_m_s8 (__inactive
, __a
, __b
, __p
);
27013 __extension__
extern __inline int32x4_t
27014 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27015 __arm_vminq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27017 return __arm_vminq_m_s32 (__inactive
, __a
, __b
, __p
);
27020 __extension__
extern __inline int16x8_t
27021 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27022 __arm_vminq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27024 return __arm_vminq_m_s16 (__inactive
, __a
, __b
, __p
);
27027 __extension__
extern __inline uint8x16_t
27028 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27029 __arm_vminq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
27031 return __arm_vminq_m_u8 (__inactive
, __a
, __b
, __p
);
27034 __extension__
extern __inline uint32x4_t
27035 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27036 __arm_vminq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
27038 return __arm_vminq_m_u32 (__inactive
, __a
, __b
, __p
);
27041 __extension__
extern __inline uint16x8_t
27042 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27043 __arm_vminq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
27045 return __arm_vminq_m_u16 (__inactive
, __a
, __b
, __p
);
27048 __extension__
extern __inline
int32_t
27049 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27050 __arm_vmladavaq_p (int32_t __a
, int8x16_t __b
, int8x16_t __c
, mve_pred16_t __p
)
27052 return __arm_vmladavaq_p_s8 (__a
, __b
, __c
, __p
);
27055 __extension__
extern __inline
int32_t
27056 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27057 __arm_vmladavaq_p (int32_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
27059 return __arm_vmladavaq_p_s32 (__a
, __b
, __c
, __p
);
27062 __extension__
extern __inline
int32_t
27063 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27064 __arm_vmladavaq_p (int32_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
27066 return __arm_vmladavaq_p_s16 (__a
, __b
, __c
, __p
);
27069 __extension__
extern __inline
uint32_t
27070 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27071 __arm_vmladavaq_p (uint32_t __a
, uint8x16_t __b
, uint8x16_t __c
, mve_pred16_t __p
)
27073 return __arm_vmladavaq_p_u8 (__a
, __b
, __c
, __p
);
27076 __extension__
extern __inline
uint32_t
27077 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27078 __arm_vmladavaq_p (uint32_t __a
, uint32x4_t __b
, uint32x4_t __c
, mve_pred16_t __p
)
27080 return __arm_vmladavaq_p_u32 (__a
, __b
, __c
, __p
);
27083 __extension__
extern __inline
uint32_t
27084 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27085 __arm_vmladavaq_p (uint32_t __a
, uint16x8_t __b
, uint16x8_t __c
, mve_pred16_t __p
)
27087 return __arm_vmladavaq_p_u16 (__a
, __b
, __c
, __p
);
27090 __extension__
extern __inline
int32_t
27091 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27092 __arm_vmladavaxq_p (int32_t __a
, int8x16_t __b
, int8x16_t __c
, mve_pred16_t __p
)
27094 return __arm_vmladavaxq_p_s8 (__a
, __b
, __c
, __p
);
27097 __extension__
extern __inline
int32_t
27098 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27099 __arm_vmladavaxq_p (int32_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
27101 return __arm_vmladavaxq_p_s32 (__a
, __b
, __c
, __p
);
27104 __extension__
extern __inline
int32_t
27105 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27106 __arm_vmladavaxq_p (int32_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
27108 return __arm_vmladavaxq_p_s16 (__a
, __b
, __c
, __p
);
27111 __extension__
extern __inline int8x16_t
27112 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27113 __arm_vmlaq_m (int8x16_t __a
, int8x16_t __b
, int8_t __c
, mve_pred16_t __p
)
27115 return __arm_vmlaq_m_n_s8 (__a
, __b
, __c
, __p
);
27118 __extension__
extern __inline int32x4_t
27119 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27120 __arm_vmlaq_m (int32x4_t __a
, int32x4_t __b
, int32_t __c
, mve_pred16_t __p
)
27122 return __arm_vmlaq_m_n_s32 (__a
, __b
, __c
, __p
);
27125 __extension__
extern __inline int16x8_t
27126 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27127 __arm_vmlaq_m (int16x8_t __a
, int16x8_t __b
, int16_t __c
, mve_pred16_t __p
)
27129 return __arm_vmlaq_m_n_s16 (__a
, __b
, __c
, __p
);
27132 __extension__
extern __inline uint8x16_t
27133 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27134 __arm_vmlaq_m (uint8x16_t __a
, uint8x16_t __b
, uint8_t __c
, mve_pred16_t __p
)
27136 return __arm_vmlaq_m_n_u8 (__a
, __b
, __c
, __p
);
27139 __extension__
extern __inline uint32x4_t
27140 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27141 __arm_vmlaq_m (uint32x4_t __a
, uint32x4_t __b
, uint32_t __c
, mve_pred16_t __p
)
27143 return __arm_vmlaq_m_n_u32 (__a
, __b
, __c
, __p
);
27146 __extension__
extern __inline uint16x8_t
27147 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27148 __arm_vmlaq_m (uint16x8_t __a
, uint16x8_t __b
, uint16_t __c
, mve_pred16_t __p
)
27150 return __arm_vmlaq_m_n_u16 (__a
, __b
, __c
, __p
);
27153 __extension__
extern __inline int8x16_t
27154 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27155 __arm_vmlasq_m (int8x16_t __a
, int8x16_t __b
, int8_t __c
, mve_pred16_t __p
)
27157 return __arm_vmlasq_m_n_s8 (__a
, __b
, __c
, __p
);
27160 __extension__
extern __inline int32x4_t
27161 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27162 __arm_vmlasq_m (int32x4_t __a
, int32x4_t __b
, int32_t __c
, mve_pred16_t __p
)
27164 return __arm_vmlasq_m_n_s32 (__a
, __b
, __c
, __p
);
27167 __extension__
extern __inline int16x8_t
27168 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27169 __arm_vmlasq_m (int16x8_t __a
, int16x8_t __b
, int16_t __c
, mve_pred16_t __p
)
27171 return __arm_vmlasq_m_n_s16 (__a
, __b
, __c
, __p
);
27174 __extension__
extern __inline uint8x16_t
27175 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27176 __arm_vmlasq_m (uint8x16_t __a
, uint8x16_t __b
, uint8_t __c
, mve_pred16_t __p
)
27178 return __arm_vmlasq_m_n_u8 (__a
, __b
, __c
, __p
);
27181 __extension__
extern __inline uint32x4_t
27182 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27183 __arm_vmlasq_m (uint32x4_t __a
, uint32x4_t __b
, uint32_t __c
, mve_pred16_t __p
)
27185 return __arm_vmlasq_m_n_u32 (__a
, __b
, __c
, __p
);
27188 __extension__
extern __inline uint16x8_t
27189 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27190 __arm_vmlasq_m (uint16x8_t __a
, uint16x8_t __b
, uint16_t __c
, mve_pred16_t __p
)
27192 return __arm_vmlasq_m_n_u16 (__a
, __b
, __c
, __p
);
27195 __extension__
extern __inline
int32_t
27196 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27197 __arm_vmlsdavaq_p (int32_t __a
, int8x16_t __b
, int8x16_t __c
, mve_pred16_t __p
)
27199 return __arm_vmlsdavaq_p_s8 (__a
, __b
, __c
, __p
);
27202 __extension__
extern __inline
int32_t
27203 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27204 __arm_vmlsdavaq_p (int32_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
27206 return __arm_vmlsdavaq_p_s32 (__a
, __b
, __c
, __p
);
27209 __extension__
extern __inline
int32_t
27210 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27211 __arm_vmlsdavaq_p (int32_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
27213 return __arm_vmlsdavaq_p_s16 (__a
, __b
, __c
, __p
);
27216 __extension__
extern __inline
int32_t
27217 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27218 __arm_vmlsdavaxq_p (int32_t __a
, int8x16_t __b
, int8x16_t __c
, mve_pred16_t __p
)
27220 return __arm_vmlsdavaxq_p_s8 (__a
, __b
, __c
, __p
);
27223 __extension__
extern __inline
int32_t
27224 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27225 __arm_vmlsdavaxq_p (int32_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
27227 return __arm_vmlsdavaxq_p_s32 (__a
, __b
, __c
, __p
);
27230 __extension__
extern __inline
int32_t
27231 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27232 __arm_vmlsdavaxq_p (int32_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
27234 return __arm_vmlsdavaxq_p_s16 (__a
, __b
, __c
, __p
);
27237 __extension__
extern __inline int8x16_t
27238 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27239 __arm_vmulhq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27241 return __arm_vmulhq_m_s8 (__inactive
, __a
, __b
, __p
);
27244 __extension__
extern __inline int32x4_t
27245 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27246 __arm_vmulhq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27248 return __arm_vmulhq_m_s32 (__inactive
, __a
, __b
, __p
);
27251 __extension__
extern __inline int16x8_t
27252 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27253 __arm_vmulhq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27255 return __arm_vmulhq_m_s16 (__inactive
, __a
, __b
, __p
);
27258 __extension__
extern __inline uint8x16_t
27259 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27260 __arm_vmulhq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
27262 return __arm_vmulhq_m_u8 (__inactive
, __a
, __b
, __p
);
27265 __extension__
extern __inline uint32x4_t
27266 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27267 __arm_vmulhq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
27269 return __arm_vmulhq_m_u32 (__inactive
, __a
, __b
, __p
);
27272 __extension__
extern __inline uint16x8_t
27273 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27274 __arm_vmulhq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
27276 return __arm_vmulhq_m_u16 (__inactive
, __a
, __b
, __p
);
27279 __extension__
extern __inline int16x8_t
27280 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27281 __arm_vmullbq_int_m (int16x8_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27283 return __arm_vmullbq_int_m_s8 (__inactive
, __a
, __b
, __p
);
27286 __extension__
extern __inline int64x2_t
27287 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27288 __arm_vmullbq_int_m (int64x2_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27290 return __arm_vmullbq_int_m_s32 (__inactive
, __a
, __b
, __p
);
27293 __extension__
extern __inline int32x4_t
27294 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27295 __arm_vmullbq_int_m (int32x4_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27297 return __arm_vmullbq_int_m_s16 (__inactive
, __a
, __b
, __p
);
27300 __extension__
extern __inline uint16x8_t
27301 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27302 __arm_vmullbq_int_m (uint16x8_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
27304 return __arm_vmullbq_int_m_u8 (__inactive
, __a
, __b
, __p
);
27307 __extension__
extern __inline uint64x2_t
27308 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27309 __arm_vmullbq_int_m (uint64x2_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
27311 return __arm_vmullbq_int_m_u32 (__inactive
, __a
, __b
, __p
);
27314 __extension__
extern __inline uint32x4_t
27315 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27316 __arm_vmullbq_int_m (uint32x4_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
27318 return __arm_vmullbq_int_m_u16 (__inactive
, __a
, __b
, __p
);
27321 __extension__
extern __inline int16x8_t
27322 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27323 __arm_vmulltq_int_m (int16x8_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27325 return __arm_vmulltq_int_m_s8 (__inactive
, __a
, __b
, __p
);
27328 __extension__
extern __inline int64x2_t
27329 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27330 __arm_vmulltq_int_m (int64x2_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27332 return __arm_vmulltq_int_m_s32 (__inactive
, __a
, __b
, __p
);
27335 __extension__
extern __inline int32x4_t
27336 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27337 __arm_vmulltq_int_m (int32x4_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27339 return __arm_vmulltq_int_m_s16 (__inactive
, __a
, __b
, __p
);
27342 __extension__
extern __inline uint16x8_t
27343 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27344 __arm_vmulltq_int_m (uint16x8_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
27346 return __arm_vmulltq_int_m_u8 (__inactive
, __a
, __b
, __p
);
27349 __extension__
extern __inline uint64x2_t
27350 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27351 __arm_vmulltq_int_m (uint64x2_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
27353 return __arm_vmulltq_int_m_u32 (__inactive
, __a
, __b
, __p
);
27356 __extension__
extern __inline uint32x4_t
27357 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27358 __arm_vmulltq_int_m (uint32x4_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
27360 return __arm_vmulltq_int_m_u16 (__inactive
, __a
, __b
, __p
);
27363 __extension__
extern __inline int8x16_t
27364 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27365 __arm_vmulq_m (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
27367 return __arm_vmulq_m_n_s8 (__inactive
, __a
, __b
, __p
);
27370 __extension__
extern __inline int32x4_t
27371 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27372 __arm_vmulq_m (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
27374 return __arm_vmulq_m_n_s32 (__inactive
, __a
, __b
, __p
);
27377 __extension__
extern __inline int16x8_t
27378 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27379 __arm_vmulq_m (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
27381 return __arm_vmulq_m_n_s16 (__inactive
, __a
, __b
, __p
);
27384 __extension__
extern __inline uint8x16_t
27385 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27386 __arm_vmulq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
27388 return __arm_vmulq_m_n_u8 (__inactive
, __a
, __b
, __p
);
27391 __extension__
extern __inline uint32x4_t
27392 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27393 __arm_vmulq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
27395 return __arm_vmulq_m_n_u32 (__inactive
, __a
, __b
, __p
);
27398 __extension__
extern __inline uint16x8_t
27399 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27400 __arm_vmulq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
27402 return __arm_vmulq_m_n_u16 (__inactive
, __a
, __b
, __p
);
27405 __extension__
extern __inline int8x16_t
27406 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27407 __arm_vmulq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27409 return __arm_vmulq_m_s8 (__inactive
, __a
, __b
, __p
);
27412 __extension__
extern __inline int32x4_t
27413 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27414 __arm_vmulq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27416 return __arm_vmulq_m_s32 (__inactive
, __a
, __b
, __p
);
27419 __extension__
extern __inline int16x8_t
27420 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27421 __arm_vmulq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27423 return __arm_vmulq_m_s16 (__inactive
, __a
, __b
, __p
);
27426 __extension__
extern __inline uint8x16_t
27427 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27428 __arm_vmulq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
27430 return __arm_vmulq_m_u8 (__inactive
, __a
, __b
, __p
);
27433 __extension__
extern __inline uint32x4_t
27434 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27435 __arm_vmulq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
27437 return __arm_vmulq_m_u32 (__inactive
, __a
, __b
, __p
);
27440 __extension__
extern __inline uint16x8_t
27441 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27442 __arm_vmulq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
27444 return __arm_vmulq_m_u16 (__inactive
, __a
, __b
, __p
);
27447 __extension__
extern __inline int8x16_t
27448 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27449 __arm_vornq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27451 return __arm_vornq_m_s8 (__inactive
, __a
, __b
, __p
);
27454 __extension__
extern __inline int32x4_t
27455 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27456 __arm_vornq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27458 return __arm_vornq_m_s32 (__inactive
, __a
, __b
, __p
);
27461 __extension__
extern __inline int16x8_t
27462 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27463 __arm_vornq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27465 return __arm_vornq_m_s16 (__inactive
, __a
, __b
, __p
);
27468 __extension__
extern __inline uint8x16_t
27469 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27470 __arm_vornq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
27472 return __arm_vornq_m_u8 (__inactive
, __a
, __b
, __p
);
27475 __extension__
extern __inline uint32x4_t
27476 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27477 __arm_vornq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
27479 return __arm_vornq_m_u32 (__inactive
, __a
, __b
, __p
);
27482 __extension__
extern __inline uint16x8_t
27483 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27484 __arm_vornq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
27486 return __arm_vornq_m_u16 (__inactive
, __a
, __b
, __p
);
27489 __extension__
extern __inline int8x16_t
27490 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27491 __arm_vorrq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27493 return __arm_vorrq_m_s8 (__inactive
, __a
, __b
, __p
);
27496 __extension__
extern __inline int32x4_t
27497 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27498 __arm_vorrq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27500 return __arm_vorrq_m_s32 (__inactive
, __a
, __b
, __p
);
27503 __extension__
extern __inline int16x8_t
27504 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27505 __arm_vorrq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27507 return __arm_vorrq_m_s16 (__inactive
, __a
, __b
, __p
);
27510 __extension__
extern __inline uint8x16_t
27511 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27512 __arm_vorrq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
27514 return __arm_vorrq_m_u8 (__inactive
, __a
, __b
, __p
);
27517 __extension__
extern __inline uint32x4_t
27518 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27519 __arm_vorrq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
27521 return __arm_vorrq_m_u32 (__inactive
, __a
, __b
, __p
);
27524 __extension__
extern __inline uint16x8_t
27525 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27526 __arm_vorrq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
27528 return __arm_vorrq_m_u16 (__inactive
, __a
, __b
, __p
);
27531 __extension__
extern __inline int8x16_t
27532 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27533 __arm_vqaddq_m (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
27535 return __arm_vqaddq_m_n_s8 (__inactive
, __a
, __b
, __p
);
27538 __extension__
extern __inline int32x4_t
27539 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27540 __arm_vqaddq_m (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
27542 return __arm_vqaddq_m_n_s32 (__inactive
, __a
, __b
, __p
);
27545 __extension__
extern __inline int16x8_t
27546 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27547 __arm_vqaddq_m (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
27549 return __arm_vqaddq_m_n_s16 (__inactive
, __a
, __b
, __p
);
27552 __extension__
extern __inline uint8x16_t
27553 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27554 __arm_vqaddq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
27556 return __arm_vqaddq_m_n_u8 (__inactive
, __a
, __b
, __p
);
27559 __extension__
extern __inline uint32x4_t
27560 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27561 __arm_vqaddq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
27563 return __arm_vqaddq_m_n_u32 (__inactive
, __a
, __b
, __p
);
27566 __extension__
extern __inline uint16x8_t
27567 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27568 __arm_vqaddq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
27570 return __arm_vqaddq_m_n_u16 (__inactive
, __a
, __b
, __p
);
27573 __extension__
extern __inline int8x16_t
27574 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27575 __arm_vqaddq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27577 return __arm_vqaddq_m_s8 (__inactive
, __a
, __b
, __p
);
27580 __extension__
extern __inline int32x4_t
27581 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27582 __arm_vqaddq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27584 return __arm_vqaddq_m_s32 (__inactive
, __a
, __b
, __p
);
27587 __extension__
extern __inline int16x8_t
27588 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27589 __arm_vqaddq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27591 return __arm_vqaddq_m_s16 (__inactive
, __a
, __b
, __p
);
27594 __extension__
extern __inline uint8x16_t
27595 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27596 __arm_vqaddq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
27598 return __arm_vqaddq_m_u8 (__inactive
, __a
, __b
, __p
);
27601 __extension__
extern __inline uint32x4_t
27602 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27603 __arm_vqaddq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
27605 return __arm_vqaddq_m_u32 (__inactive
, __a
, __b
, __p
);
27608 __extension__
extern __inline uint16x8_t
27609 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27610 __arm_vqaddq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
27612 return __arm_vqaddq_m_u16 (__inactive
, __a
, __b
, __p
);
27615 __extension__
extern __inline int8x16_t
27616 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27617 __arm_vqdmladhq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27619 return __arm_vqdmladhq_m_s8 (__inactive
, __a
, __b
, __p
);
27622 __extension__
extern __inline int32x4_t
27623 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27624 __arm_vqdmladhq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27626 return __arm_vqdmladhq_m_s32 (__inactive
, __a
, __b
, __p
);
27629 __extension__
extern __inline int16x8_t
27630 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27631 __arm_vqdmladhq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27633 return __arm_vqdmladhq_m_s16 (__inactive
, __a
, __b
, __p
);
27636 __extension__
extern __inline int8x16_t
27637 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27638 __arm_vqdmladhxq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27640 return __arm_vqdmladhxq_m_s8 (__inactive
, __a
, __b
, __p
);
27643 __extension__
extern __inline int32x4_t
27644 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27645 __arm_vqdmladhxq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27647 return __arm_vqdmladhxq_m_s32 (__inactive
, __a
, __b
, __p
);
27650 __extension__
extern __inline int16x8_t
27651 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27652 __arm_vqdmladhxq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27654 return __arm_vqdmladhxq_m_s16 (__inactive
, __a
, __b
, __p
);
27657 __extension__
extern __inline int8x16_t
27658 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27659 __arm_vqdmlahq_m (int8x16_t __a
, int8x16_t __b
, int8_t __c
, mve_pred16_t __p
)
27661 return __arm_vqdmlahq_m_n_s8 (__a
, __b
, __c
, __p
);
27664 __extension__
extern __inline int32x4_t
27665 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27666 __arm_vqdmlahq_m (int32x4_t __a
, int32x4_t __b
, int32_t __c
, mve_pred16_t __p
)
27668 return __arm_vqdmlahq_m_n_s32 (__a
, __b
, __c
, __p
);
27671 __extension__
extern __inline int16x8_t
27672 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27673 __arm_vqdmlahq_m (int16x8_t __a
, int16x8_t __b
, int16_t __c
, mve_pred16_t __p
)
27675 return __arm_vqdmlahq_m_n_s16 (__a
, __b
, __c
, __p
);
27678 __extension__
extern __inline int8x16_t
27679 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27680 __arm_vqdmlsdhq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27682 return __arm_vqdmlsdhq_m_s8 (__inactive
, __a
, __b
, __p
);
27685 __extension__
extern __inline int32x4_t
27686 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27687 __arm_vqdmlsdhq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27689 return __arm_vqdmlsdhq_m_s32 (__inactive
, __a
, __b
, __p
);
27692 __extension__
extern __inline int16x8_t
27693 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27694 __arm_vqdmlsdhq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27696 return __arm_vqdmlsdhq_m_s16 (__inactive
, __a
, __b
, __p
);
27699 __extension__
extern __inline int8x16_t
27700 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27701 __arm_vqdmlsdhxq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27703 return __arm_vqdmlsdhxq_m_s8 (__inactive
, __a
, __b
, __p
);
27706 __extension__
extern __inline int32x4_t
27707 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27708 __arm_vqdmlsdhxq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27710 return __arm_vqdmlsdhxq_m_s32 (__inactive
, __a
, __b
, __p
);
27713 __extension__
extern __inline int16x8_t
27714 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27715 __arm_vqdmlsdhxq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27717 return __arm_vqdmlsdhxq_m_s16 (__inactive
, __a
, __b
, __p
);
27720 __extension__
extern __inline int8x16_t
27721 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27722 __arm_vqdmulhq_m (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
27724 return __arm_vqdmulhq_m_n_s8 (__inactive
, __a
, __b
, __p
);
27727 __extension__
extern __inline int32x4_t
27728 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27729 __arm_vqdmulhq_m (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
27731 return __arm_vqdmulhq_m_n_s32 (__inactive
, __a
, __b
, __p
);
27734 __extension__
extern __inline int16x8_t
27735 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27736 __arm_vqdmulhq_m (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
27738 return __arm_vqdmulhq_m_n_s16 (__inactive
, __a
, __b
, __p
);
27741 __extension__
extern __inline int8x16_t
27742 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27743 __arm_vqdmulhq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27745 return __arm_vqdmulhq_m_s8 (__inactive
, __a
, __b
, __p
);
27748 __extension__
extern __inline int32x4_t
27749 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27750 __arm_vqdmulhq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27752 return __arm_vqdmulhq_m_s32 (__inactive
, __a
, __b
, __p
);
27755 __extension__
extern __inline int16x8_t
27756 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27757 __arm_vqdmulhq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27759 return __arm_vqdmulhq_m_s16 (__inactive
, __a
, __b
, __p
);
27762 __extension__
extern __inline int8x16_t
27763 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27764 __arm_vqrdmladhq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27766 return __arm_vqrdmladhq_m_s8 (__inactive
, __a
, __b
, __p
);
27769 __extension__
extern __inline int32x4_t
27770 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27771 __arm_vqrdmladhq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27773 return __arm_vqrdmladhq_m_s32 (__inactive
, __a
, __b
, __p
);
27776 __extension__
extern __inline int16x8_t
27777 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27778 __arm_vqrdmladhq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27780 return __arm_vqrdmladhq_m_s16 (__inactive
, __a
, __b
, __p
);
27783 __extension__
extern __inline int8x16_t
27784 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27785 __arm_vqrdmladhxq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27787 return __arm_vqrdmladhxq_m_s8 (__inactive
, __a
, __b
, __p
);
27790 __extension__
extern __inline int32x4_t
27791 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27792 __arm_vqrdmladhxq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27794 return __arm_vqrdmladhxq_m_s32 (__inactive
, __a
, __b
, __p
);
27797 __extension__
extern __inline int16x8_t
27798 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27799 __arm_vqrdmladhxq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27801 return __arm_vqrdmladhxq_m_s16 (__inactive
, __a
, __b
, __p
);
27804 __extension__
extern __inline int8x16_t
27805 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27806 __arm_vqrdmlahq_m (int8x16_t __a
, int8x16_t __b
, int8_t __c
, mve_pred16_t __p
)
27808 return __arm_vqrdmlahq_m_n_s8 (__a
, __b
, __c
, __p
);
27811 __extension__
extern __inline int32x4_t
27812 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27813 __arm_vqrdmlahq_m (int32x4_t __a
, int32x4_t __b
, int32_t __c
, mve_pred16_t __p
)
27815 return __arm_vqrdmlahq_m_n_s32 (__a
, __b
, __c
, __p
);
27818 __extension__
extern __inline int16x8_t
27819 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27820 __arm_vqrdmlahq_m (int16x8_t __a
, int16x8_t __b
, int16_t __c
, mve_pred16_t __p
)
27822 return __arm_vqrdmlahq_m_n_s16 (__a
, __b
, __c
, __p
);
27825 __extension__
extern __inline int8x16_t
27826 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27827 __arm_vqrdmlashq_m (int8x16_t __a
, int8x16_t __b
, int8_t __c
, mve_pred16_t __p
)
27829 return __arm_vqrdmlashq_m_n_s8 (__a
, __b
, __c
, __p
);
27832 __extension__
extern __inline int32x4_t
27833 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27834 __arm_vqrdmlashq_m (int32x4_t __a
, int32x4_t __b
, int32_t __c
, mve_pred16_t __p
)
27836 return __arm_vqrdmlashq_m_n_s32 (__a
, __b
, __c
, __p
);
27839 __extension__
extern __inline int16x8_t
27840 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27841 __arm_vqrdmlashq_m (int16x8_t __a
, int16x8_t __b
, int16_t __c
, mve_pred16_t __p
)
27843 return __arm_vqrdmlashq_m_n_s16 (__a
, __b
, __c
, __p
);
27846 __extension__
extern __inline int8x16_t
27847 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27848 __arm_vqdmlashq_m (int8x16_t __a
, int8x16_t __b
, int8_t __c
, mve_pred16_t __p
)
27850 return __arm_vqdmlashq_m_n_s8 (__a
, __b
, __c
, __p
);
27853 __extension__
extern __inline int16x8_t
27854 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27855 __arm_vqdmlashq_m (int16x8_t __a
, int16x8_t __b
, int16_t __c
, mve_pred16_t __p
)
27857 return __arm_vqdmlashq_m_n_s16 (__a
, __b
, __c
, __p
);
27860 __extension__
extern __inline int32x4_t
27861 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27862 __arm_vqdmlashq_m (int32x4_t __a
, int32x4_t __b
, int32_t __c
, mve_pred16_t __p
)
27864 return __arm_vqdmlashq_m_n_s32 (__a
, __b
, __c
, __p
);
27867 __extension__
extern __inline int8x16_t
27868 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27869 __arm_vqrdmlsdhq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27871 return __arm_vqrdmlsdhq_m_s8 (__inactive
, __a
, __b
, __p
);
27874 __extension__
extern __inline int32x4_t
27875 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27876 __arm_vqrdmlsdhq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27878 return __arm_vqrdmlsdhq_m_s32 (__inactive
, __a
, __b
, __p
);
27881 __extension__
extern __inline int16x8_t
27882 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27883 __arm_vqrdmlsdhq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27885 return __arm_vqrdmlsdhq_m_s16 (__inactive
, __a
, __b
, __p
);
27888 __extension__
extern __inline int8x16_t
27889 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27890 __arm_vqrdmlsdhxq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27892 return __arm_vqrdmlsdhxq_m_s8 (__inactive
, __a
, __b
, __p
);
27895 __extension__
extern __inline int32x4_t
27896 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27897 __arm_vqrdmlsdhxq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27899 return __arm_vqrdmlsdhxq_m_s32 (__inactive
, __a
, __b
, __p
);
27902 __extension__
extern __inline int16x8_t
27903 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27904 __arm_vqrdmlsdhxq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27906 return __arm_vqrdmlsdhxq_m_s16 (__inactive
, __a
, __b
, __p
);
27909 __extension__
extern __inline int8x16_t
27910 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27911 __arm_vqrdmulhq_m (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
27913 return __arm_vqrdmulhq_m_n_s8 (__inactive
, __a
, __b
, __p
);
27916 __extension__
extern __inline int32x4_t
27917 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27918 __arm_vqrdmulhq_m (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
27920 return __arm_vqrdmulhq_m_n_s32 (__inactive
, __a
, __b
, __p
);
27923 __extension__
extern __inline int16x8_t
27924 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27925 __arm_vqrdmulhq_m (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
27927 return __arm_vqrdmulhq_m_n_s16 (__inactive
, __a
, __b
, __p
);
27930 __extension__
extern __inline int8x16_t
27931 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27932 __arm_vqrdmulhq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27934 return __arm_vqrdmulhq_m_s8 (__inactive
, __a
, __b
, __p
);
27937 __extension__
extern __inline int32x4_t
27938 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27939 __arm_vqrdmulhq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27941 return __arm_vqrdmulhq_m_s32 (__inactive
, __a
, __b
, __p
);
27944 __extension__
extern __inline int16x8_t
27945 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27946 __arm_vqrdmulhq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27948 return __arm_vqrdmulhq_m_s16 (__inactive
, __a
, __b
, __p
);
27951 __extension__
extern __inline int8x16_t
27952 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27953 __arm_vqrshlq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27955 return __arm_vqrshlq_m_s8 (__inactive
, __a
, __b
, __p
);
27958 __extension__
extern __inline int32x4_t
27959 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27960 __arm_vqrshlq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27962 return __arm_vqrshlq_m_s32 (__inactive
, __a
, __b
, __p
);
27965 __extension__
extern __inline int16x8_t
27966 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27967 __arm_vqrshlq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27969 return __arm_vqrshlq_m_s16 (__inactive
, __a
, __b
, __p
);
27972 __extension__
extern __inline uint8x16_t
27973 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27974 __arm_vqrshlq_m (uint8x16_t __inactive
, uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
27976 return __arm_vqrshlq_m_u8 (__inactive
, __a
, __b
, __p
);
27979 __extension__
extern __inline uint32x4_t
27980 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27981 __arm_vqrshlq_m (uint32x4_t __inactive
, uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
27983 return __arm_vqrshlq_m_u32 (__inactive
, __a
, __b
, __p
);
27986 __extension__
extern __inline uint16x8_t
27987 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27988 __arm_vqrshlq_m (uint16x8_t __inactive
, uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
27990 return __arm_vqrshlq_m_u16 (__inactive
, __a
, __b
, __p
);
27993 __extension__
extern __inline int8x16_t
27994 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
27995 __arm_vqshlq_m_n (int8x16_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
27997 return __arm_vqshlq_m_n_s8 (__inactive
, __a
, __imm
, __p
);
28000 __extension__
extern __inline int32x4_t
28001 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28002 __arm_vqshlq_m_n (int32x4_t __inactive
, int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
28004 return __arm_vqshlq_m_n_s32 (__inactive
, __a
, __imm
, __p
);
28007 __extension__
extern __inline int16x8_t
28008 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28009 __arm_vqshlq_m_n (int16x8_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
28011 return __arm_vqshlq_m_n_s16 (__inactive
, __a
, __imm
, __p
);
28014 __extension__
extern __inline uint8x16_t
28015 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28016 __arm_vqshlq_m_n (uint8x16_t __inactive
, uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
28018 return __arm_vqshlq_m_n_u8 (__inactive
, __a
, __imm
, __p
);
28021 __extension__
extern __inline uint32x4_t
28022 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28023 __arm_vqshlq_m_n (uint32x4_t __inactive
, uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
28025 return __arm_vqshlq_m_n_u32 (__inactive
, __a
, __imm
, __p
);
28028 __extension__
extern __inline uint16x8_t
28029 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28030 __arm_vqshlq_m_n (uint16x8_t __inactive
, uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
28032 return __arm_vqshlq_m_n_u16 (__inactive
, __a
, __imm
, __p
);
28035 __extension__
extern __inline int8x16_t
28036 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28037 __arm_vqshlq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
28039 return __arm_vqshlq_m_s8 (__inactive
, __a
, __b
, __p
);
28042 __extension__
extern __inline int32x4_t
28043 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28044 __arm_vqshlq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
28046 return __arm_vqshlq_m_s32 (__inactive
, __a
, __b
, __p
);
28049 __extension__
extern __inline int16x8_t
28050 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28051 __arm_vqshlq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
28053 return __arm_vqshlq_m_s16 (__inactive
, __a
, __b
, __p
);
28056 __extension__
extern __inline uint8x16_t
28057 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28058 __arm_vqshlq_m (uint8x16_t __inactive
, uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
28060 return __arm_vqshlq_m_u8 (__inactive
, __a
, __b
, __p
);
28063 __extension__
extern __inline uint32x4_t
28064 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28065 __arm_vqshlq_m (uint32x4_t __inactive
, uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
28067 return __arm_vqshlq_m_u32 (__inactive
, __a
, __b
, __p
);
28070 __extension__
extern __inline uint16x8_t
28071 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28072 __arm_vqshlq_m (uint16x8_t __inactive
, uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
28074 return __arm_vqshlq_m_u16 (__inactive
, __a
, __b
, __p
);
28077 __extension__
extern __inline int8x16_t
28078 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28079 __arm_vqsubq_m (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
28081 return __arm_vqsubq_m_n_s8 (__inactive
, __a
, __b
, __p
);
28084 __extension__
extern __inline int32x4_t
28085 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28086 __arm_vqsubq_m (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
28088 return __arm_vqsubq_m_n_s32 (__inactive
, __a
, __b
, __p
);
28091 __extension__
extern __inline int16x8_t
28092 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28093 __arm_vqsubq_m (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
28095 return __arm_vqsubq_m_n_s16 (__inactive
, __a
, __b
, __p
);
28098 __extension__
extern __inline uint8x16_t
28099 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28100 __arm_vqsubq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
28102 return __arm_vqsubq_m_n_u8 (__inactive
, __a
, __b
, __p
);
28105 __extension__
extern __inline uint32x4_t
28106 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28107 __arm_vqsubq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
28109 return __arm_vqsubq_m_n_u32 (__inactive
, __a
, __b
, __p
);
28112 __extension__
extern __inline uint16x8_t
28113 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28114 __arm_vqsubq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
28116 return __arm_vqsubq_m_n_u16 (__inactive
, __a
, __b
, __p
);
28119 __extension__
extern __inline int8x16_t
28120 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28121 __arm_vqsubq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
28123 return __arm_vqsubq_m_s8 (__inactive
, __a
, __b
, __p
);
28126 __extension__
extern __inline int32x4_t
28127 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28128 __arm_vqsubq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
28130 return __arm_vqsubq_m_s32 (__inactive
, __a
, __b
, __p
);
28133 __extension__
extern __inline int16x8_t
28134 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28135 __arm_vqsubq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
28137 return __arm_vqsubq_m_s16 (__inactive
, __a
, __b
, __p
);
28140 __extension__
extern __inline uint8x16_t
28141 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28142 __arm_vqsubq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
28144 return __arm_vqsubq_m_u8 (__inactive
, __a
, __b
, __p
);
28147 __extension__
extern __inline uint32x4_t
28148 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28149 __arm_vqsubq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
28151 return __arm_vqsubq_m_u32 (__inactive
, __a
, __b
, __p
);
28154 __extension__
extern __inline uint16x8_t
28155 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28156 __arm_vqsubq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
28158 return __arm_vqsubq_m_u16 (__inactive
, __a
, __b
, __p
);
28161 __extension__
extern __inline int8x16_t
28162 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28163 __arm_vrhaddq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
28165 return __arm_vrhaddq_m_s8 (__inactive
, __a
, __b
, __p
);
28168 __extension__
extern __inline int32x4_t
28169 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28170 __arm_vrhaddq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
28172 return __arm_vrhaddq_m_s32 (__inactive
, __a
, __b
, __p
);
28175 __extension__
extern __inline int16x8_t
28176 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28177 __arm_vrhaddq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
28179 return __arm_vrhaddq_m_s16 (__inactive
, __a
, __b
, __p
);
28182 __extension__
extern __inline uint8x16_t
28183 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28184 __arm_vrhaddq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
28186 return __arm_vrhaddq_m_u8 (__inactive
, __a
, __b
, __p
);
28189 __extension__
extern __inline uint32x4_t
28190 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28191 __arm_vrhaddq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
28193 return __arm_vrhaddq_m_u32 (__inactive
, __a
, __b
, __p
);
28196 __extension__
extern __inline uint16x8_t
28197 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28198 __arm_vrhaddq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
28200 return __arm_vrhaddq_m_u16 (__inactive
, __a
, __b
, __p
);
28203 __extension__
extern __inline int8x16_t
28204 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28205 __arm_vrmulhq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
28207 return __arm_vrmulhq_m_s8 (__inactive
, __a
, __b
, __p
);
28210 __extension__
extern __inline int32x4_t
28211 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28212 __arm_vrmulhq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
28214 return __arm_vrmulhq_m_s32 (__inactive
, __a
, __b
, __p
);
28217 __extension__
extern __inline int16x8_t
28218 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28219 __arm_vrmulhq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
28221 return __arm_vrmulhq_m_s16 (__inactive
, __a
, __b
, __p
);
28224 __extension__
extern __inline uint8x16_t
28225 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28226 __arm_vrmulhq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
28228 return __arm_vrmulhq_m_u8 (__inactive
, __a
, __b
, __p
);
28231 __extension__
extern __inline uint32x4_t
28232 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28233 __arm_vrmulhq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
28235 return __arm_vrmulhq_m_u32 (__inactive
, __a
, __b
, __p
);
28238 __extension__
extern __inline uint16x8_t
28239 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28240 __arm_vrmulhq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
28242 return __arm_vrmulhq_m_u16 (__inactive
, __a
, __b
, __p
);
28245 __extension__
extern __inline int8x16_t
28246 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28247 __arm_vrshlq_m (int8x16_t __inactive
, int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
28249 return __arm_vrshlq_m_s8 (__inactive
, __a
, __b
, __p
);
28252 __extension__
extern __inline int32x4_t
28253 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28254 __arm_vrshlq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
28256 return __arm_vrshlq_m_s32 (__inactive
, __a
, __b
, __p
);
28259 __extension__
extern __inline int16x8_t
28260 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28261 __arm_vrshlq_m (int16x8_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
28263 return __arm_vrshlq_m_s16 (__inactive
, __a
, __b
, __p
);
28266 __extension__
extern __inline uint8x16_t
28267 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28268 __arm_vrshlq_m (uint8x16_t __inactive
, uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
28270 return __arm_vrshlq_m_u8 (__inactive
, __a
, __b
, __p
);
28273 __extension__
extern __inline uint32x4_t
28274 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28275 __arm_vrshlq_m (uint32x4_t __inactive
, uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
28277 return __arm_vrshlq_m_u32 (__inactive
, __a
, __b
, __p
);
28280 __extension__
extern __inline uint16x8_t
28281 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28282 __arm_vrshlq_m (uint16x8_t __inactive
, uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
28284 return __arm_vrshlq_m_u16 (__inactive
, __a
, __b
, __p
);
28287 __extension__
extern __inline int8x16_t
28288 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28289 __arm_vrshrq_m (int8x16_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
28291 return __arm_vrshrq_m_n_s8 (__inactive
, __a
, __imm
, __p
);
28294 __extension__
extern __inline int32x4_t
28295 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28296 __arm_vrshrq_m (int32x4_t __inactive
, int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
28298 return __arm_vrshrq_m_n_s32 (__inactive
, __a
, __imm
, __p
);
28301 __extension__
extern __inline int16x8_t
28302 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28303 __arm_vrshrq_m (int16x8_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
28305 return __arm_vrshrq_m_n_s16 (__inactive
, __a
, __imm
, __p
);
28308 __extension__
extern __inline uint8x16_t
28309 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28310 __arm_vrshrq_m (uint8x16_t __inactive
, uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
28312 return __arm_vrshrq_m_n_u8 (__inactive
, __a
, __imm
, __p
);
28315 __extension__
extern __inline uint32x4_t
28316 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28317 __arm_vrshrq_m (uint32x4_t __inactive
, uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
28319 return __arm_vrshrq_m_n_u32 (__inactive
, __a
, __imm
, __p
);
28322 __extension__
extern __inline uint16x8_t
28323 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28324 __arm_vrshrq_m (uint16x8_t __inactive
, uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
28326 return __arm_vrshrq_m_n_u16 (__inactive
, __a
, __imm
, __p
);
28329 __extension__
extern __inline int8x16_t
28330 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28331 __arm_vshlq_m_n (int8x16_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
28333 return __arm_vshlq_m_n_s8 (__inactive
, __a
, __imm
, __p
);
28336 __extension__
extern __inline int32x4_t
28337 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28338 __arm_vshlq_m_n (int32x4_t __inactive
, int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
28340 return __arm_vshlq_m_n_s32 (__inactive
, __a
, __imm
, __p
);
28343 __extension__
extern __inline int16x8_t
28344 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28345 __arm_vshlq_m_n (int16x8_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
28347 return __arm_vshlq_m_n_s16 (__inactive
, __a
, __imm
, __p
);
28350 __extension__
extern __inline uint8x16_t
28351 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28352 __arm_vshlq_m_n (uint8x16_t __inactive
, uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
28354 return __arm_vshlq_m_n_u8 (__inactive
, __a
, __imm
, __p
);
28357 __extension__
extern __inline uint32x4_t
28358 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28359 __arm_vshlq_m_n (uint32x4_t __inactive
, uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
28361 return __arm_vshlq_m_n_u32 (__inactive
, __a
, __imm
, __p
);
28364 __extension__
extern __inline uint16x8_t
28365 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28366 __arm_vshlq_m_n (uint16x8_t __inactive
, uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
28368 return __arm_vshlq_m_n_u16 (__inactive
, __a
, __imm
, __p
);
28371 __extension__
extern __inline int8x16_t
28372 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28373 __arm_vshrq_m (int8x16_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
28375 return __arm_vshrq_m_n_s8 (__inactive
, __a
, __imm
, __p
);
28378 __extension__
extern __inline int32x4_t
28379 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28380 __arm_vshrq_m (int32x4_t __inactive
, int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
28382 return __arm_vshrq_m_n_s32 (__inactive
, __a
, __imm
, __p
);
28385 __extension__
extern __inline int16x8_t
28386 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28387 __arm_vshrq_m (int16x8_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
28389 return __arm_vshrq_m_n_s16 (__inactive
, __a
, __imm
, __p
);
28392 __extension__
extern __inline uint8x16_t
28393 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28394 __arm_vshrq_m (uint8x16_t __inactive
, uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
28396 return __arm_vshrq_m_n_u8 (__inactive
, __a
, __imm
, __p
);
28399 __extension__
extern __inline uint32x4_t
28400 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28401 __arm_vshrq_m (uint32x4_t __inactive
, uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
28403 return __arm_vshrq_m_n_u32 (__inactive
, __a
, __imm
, __p
);
28406 __extension__
extern __inline uint16x8_t
28407 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28408 __arm_vshrq_m (uint16x8_t __inactive
, uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
28410 return __arm_vshrq_m_n_u16 (__inactive
, __a
, __imm
, __p
);
28413 __extension__
extern __inline int8x16_t
28414 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28415 __arm_vsliq_m (int8x16_t __a
, int8x16_t __b
, const int __imm
, mve_pred16_t __p
)
28417 return __arm_vsliq_m_n_s8 (__a
, __b
, __imm
, __p
);
28420 __extension__
extern __inline int32x4_t
28421 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28422 __arm_vsliq_m (int32x4_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28424 return __arm_vsliq_m_n_s32 (__a
, __b
, __imm
, __p
);
28427 __extension__
extern __inline int16x8_t
28428 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28429 __arm_vsliq_m (int16x8_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28431 return __arm_vsliq_m_n_s16 (__a
, __b
, __imm
, __p
);
28434 __extension__
extern __inline uint8x16_t
28435 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28436 __arm_vsliq_m (uint8x16_t __a
, uint8x16_t __b
, const int __imm
, mve_pred16_t __p
)
28438 return __arm_vsliq_m_n_u8 (__a
, __b
, __imm
, __p
);
28441 __extension__
extern __inline uint32x4_t
28442 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28443 __arm_vsliq_m (uint32x4_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28445 return __arm_vsliq_m_n_u32 (__a
, __b
, __imm
, __p
);
28448 __extension__
extern __inline uint16x8_t
28449 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28450 __arm_vsliq_m (uint16x8_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28452 return __arm_vsliq_m_n_u16 (__a
, __b
, __imm
, __p
);
28455 __extension__
extern __inline int8x16_t
28456 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28457 __arm_vsubq_m (int8x16_t __inactive
, int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
28459 return __arm_vsubq_m_n_s8 (__inactive
, __a
, __b
, __p
);
28462 __extension__
extern __inline int32x4_t
28463 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28464 __arm_vsubq_m (int32x4_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
28466 return __arm_vsubq_m_n_s32 (__inactive
, __a
, __b
, __p
);
28469 __extension__
extern __inline int16x8_t
28470 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28471 __arm_vsubq_m (int16x8_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
28473 return __arm_vsubq_m_n_s16 (__inactive
, __a
, __b
, __p
);
28476 __extension__
extern __inline uint8x16_t
28477 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28478 __arm_vsubq_m (uint8x16_t __inactive
, uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
28480 return __arm_vsubq_m_n_u8 (__inactive
, __a
, __b
, __p
);
28483 __extension__
extern __inline uint32x4_t
28484 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28485 __arm_vsubq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
28487 return __arm_vsubq_m_n_u32 (__inactive
, __a
, __b
, __p
);
28490 __extension__
extern __inline uint16x8_t
28491 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28492 __arm_vsubq_m (uint16x8_t __inactive
, uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
28494 return __arm_vsubq_m_n_u16 (__inactive
, __a
, __b
, __p
);
28497 __extension__
extern __inline
int64_t
28498 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28499 __arm_vmlaldavaq_p (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
28501 return __arm_vmlaldavaq_p_s32 (__a
, __b
, __c
, __p
);
28504 __extension__
extern __inline
int64_t
28505 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28506 __arm_vmlaldavaq_p (int64_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
28508 return __arm_vmlaldavaq_p_s16 (__a
, __b
, __c
, __p
);
28511 __extension__
extern __inline
uint64_t
28512 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28513 __arm_vmlaldavaq_p (uint64_t __a
, uint32x4_t __b
, uint32x4_t __c
, mve_pred16_t __p
)
28515 return __arm_vmlaldavaq_p_u32 (__a
, __b
, __c
, __p
);
28518 __extension__
extern __inline
uint64_t
28519 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28520 __arm_vmlaldavaq_p (uint64_t __a
, uint16x8_t __b
, uint16x8_t __c
, mve_pred16_t __p
)
28522 return __arm_vmlaldavaq_p_u16 (__a
, __b
, __c
, __p
);
28525 __extension__
extern __inline
int64_t
28526 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28527 __arm_vmlaldavaxq_p (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
28529 return __arm_vmlaldavaxq_p_s32 (__a
, __b
, __c
, __p
);
28532 __extension__
extern __inline
int64_t
28533 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28534 __arm_vmlaldavaxq_p (int64_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
28536 return __arm_vmlaldavaxq_p_s16 (__a
, __b
, __c
, __p
);
28539 __extension__
extern __inline
int64_t
28540 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28541 __arm_vmlsldavaq_p (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
28543 return __arm_vmlsldavaq_p_s32 (__a
, __b
, __c
, __p
);
28546 __extension__
extern __inline
int64_t
28547 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28548 __arm_vmlsldavaq_p (int64_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
28550 return __arm_vmlsldavaq_p_s16 (__a
, __b
, __c
, __p
);
28553 __extension__
extern __inline
int64_t
28554 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28555 __arm_vmlsldavaxq_p (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
28557 return __arm_vmlsldavaxq_p_s32 (__a
, __b
, __c
, __p
);
28560 __extension__
extern __inline
int64_t
28561 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28562 __arm_vmlsldavaxq_p (int64_t __a
, int16x8_t __b
, int16x8_t __c
, mve_pred16_t __p
)
28564 return __arm_vmlsldavaxq_p_s16 (__a
, __b
, __c
, __p
);
28567 __extension__
extern __inline uint16x8_t
28568 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28569 __arm_vmullbq_poly_m (uint16x8_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
28571 return __arm_vmullbq_poly_m_p8 (__inactive
, __a
, __b
, __p
);
28574 __extension__
extern __inline uint32x4_t
28575 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28576 __arm_vmullbq_poly_m (uint32x4_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
28578 return __arm_vmullbq_poly_m_p16 (__inactive
, __a
, __b
, __p
);
28581 __extension__
extern __inline uint16x8_t
28582 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28583 __arm_vmulltq_poly_m (uint16x8_t __inactive
, uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
28585 return __arm_vmulltq_poly_m_p8 (__inactive
, __a
, __b
, __p
);
28588 __extension__
extern __inline uint32x4_t
28589 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28590 __arm_vmulltq_poly_m (uint32x4_t __inactive
, uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
28592 return __arm_vmulltq_poly_m_p16 (__inactive
, __a
, __b
, __p
);
28595 __extension__
extern __inline int64x2_t
28596 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28597 __arm_vqdmullbq_m (int64x2_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
28599 return __arm_vqdmullbq_m_n_s32 (__inactive
, __a
, __b
, __p
);
28602 __extension__
extern __inline int32x4_t
28603 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28604 __arm_vqdmullbq_m (int32x4_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
28606 return __arm_vqdmullbq_m_n_s16 (__inactive
, __a
, __b
, __p
);
28609 __extension__
extern __inline int64x2_t
28610 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28611 __arm_vqdmullbq_m (int64x2_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
28613 return __arm_vqdmullbq_m_s32 (__inactive
, __a
, __b
, __p
);
28616 __extension__
extern __inline int32x4_t
28617 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28618 __arm_vqdmullbq_m (int32x4_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
28620 return __arm_vqdmullbq_m_s16 (__inactive
, __a
, __b
, __p
);
28623 __extension__
extern __inline int64x2_t
28624 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28625 __arm_vqdmulltq_m (int64x2_t __inactive
, int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
28627 return __arm_vqdmulltq_m_n_s32 (__inactive
, __a
, __b
, __p
);
28630 __extension__
extern __inline int32x4_t
28631 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28632 __arm_vqdmulltq_m (int32x4_t __inactive
, int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
28634 return __arm_vqdmulltq_m_n_s16 (__inactive
, __a
, __b
, __p
);
28637 __extension__
extern __inline int64x2_t
28638 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28639 __arm_vqdmulltq_m (int64x2_t __inactive
, int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
28641 return __arm_vqdmulltq_m_s32 (__inactive
, __a
, __b
, __p
);
28644 __extension__
extern __inline int32x4_t
28645 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28646 __arm_vqdmulltq_m (int32x4_t __inactive
, int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
28648 return __arm_vqdmulltq_m_s16 (__inactive
, __a
, __b
, __p
);
28651 __extension__
extern __inline int16x8_t
28652 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28653 __arm_vqrshrnbq_m (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28655 return __arm_vqrshrnbq_m_n_s32 (__a
, __b
, __imm
, __p
);
28658 __extension__
extern __inline int8x16_t
28659 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28660 __arm_vqrshrnbq_m (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28662 return __arm_vqrshrnbq_m_n_s16 (__a
, __b
, __imm
, __p
);
28665 __extension__
extern __inline uint16x8_t
28666 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28667 __arm_vqrshrnbq_m (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28669 return __arm_vqrshrnbq_m_n_u32 (__a
, __b
, __imm
, __p
);
28672 __extension__
extern __inline uint8x16_t
28673 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28674 __arm_vqrshrnbq_m (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28676 return __arm_vqrshrnbq_m_n_u16 (__a
, __b
, __imm
, __p
);
28679 __extension__
extern __inline int16x8_t
28680 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28681 __arm_vqrshrntq_m (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28683 return __arm_vqrshrntq_m_n_s32 (__a
, __b
, __imm
, __p
);
28686 __extension__
extern __inline int8x16_t
28687 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28688 __arm_vqrshrntq_m (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28690 return __arm_vqrshrntq_m_n_s16 (__a
, __b
, __imm
, __p
);
28693 __extension__
extern __inline uint16x8_t
28694 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28695 __arm_vqrshrntq_m (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28697 return __arm_vqrshrntq_m_n_u32 (__a
, __b
, __imm
, __p
);
28700 __extension__
extern __inline uint8x16_t
28701 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28702 __arm_vqrshrntq_m (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28704 return __arm_vqrshrntq_m_n_u16 (__a
, __b
, __imm
, __p
);
28707 __extension__
extern __inline uint16x8_t
28708 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28709 __arm_vqrshrunbq_m (uint16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28711 return __arm_vqrshrunbq_m_n_s32 (__a
, __b
, __imm
, __p
);
28714 __extension__
extern __inline uint8x16_t
28715 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28716 __arm_vqrshrunbq_m (uint8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28718 return __arm_vqrshrunbq_m_n_s16 (__a
, __b
, __imm
, __p
);
28721 __extension__
extern __inline uint16x8_t
28722 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28723 __arm_vqrshruntq_m (uint16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28725 return __arm_vqrshruntq_m_n_s32 (__a
, __b
, __imm
, __p
);
28728 __extension__
extern __inline uint8x16_t
28729 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28730 __arm_vqrshruntq_m (uint8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28732 return __arm_vqrshruntq_m_n_s16 (__a
, __b
, __imm
, __p
);
28735 __extension__
extern __inline int16x8_t
28736 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28737 __arm_vqshrnbq_m (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28739 return __arm_vqshrnbq_m_n_s32 (__a
, __b
, __imm
, __p
);
28742 __extension__
extern __inline int8x16_t
28743 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28744 __arm_vqshrnbq_m (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28746 return __arm_vqshrnbq_m_n_s16 (__a
, __b
, __imm
, __p
);
28749 __extension__
extern __inline uint16x8_t
28750 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28751 __arm_vqshrnbq_m (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28753 return __arm_vqshrnbq_m_n_u32 (__a
, __b
, __imm
, __p
);
28756 __extension__
extern __inline uint8x16_t
28757 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28758 __arm_vqshrnbq_m (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28760 return __arm_vqshrnbq_m_n_u16 (__a
, __b
, __imm
, __p
);
28763 __extension__
extern __inline int16x8_t
28764 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28765 __arm_vqshrntq_m (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28767 return __arm_vqshrntq_m_n_s32 (__a
, __b
, __imm
, __p
);
28770 __extension__
extern __inline int8x16_t
28771 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28772 __arm_vqshrntq_m (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28774 return __arm_vqshrntq_m_n_s16 (__a
, __b
, __imm
, __p
);
28777 __extension__
extern __inline uint16x8_t
28778 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28779 __arm_vqshrntq_m (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28781 return __arm_vqshrntq_m_n_u32 (__a
, __b
, __imm
, __p
);
28784 __extension__
extern __inline uint8x16_t
28785 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28786 __arm_vqshrntq_m (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28788 return __arm_vqshrntq_m_n_u16 (__a
, __b
, __imm
, __p
);
28791 __extension__
extern __inline uint16x8_t
28792 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28793 __arm_vqshrunbq_m (uint16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28795 return __arm_vqshrunbq_m_n_s32 (__a
, __b
, __imm
, __p
);
28798 __extension__
extern __inline uint8x16_t
28799 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28800 __arm_vqshrunbq_m (uint8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28802 return __arm_vqshrunbq_m_n_s16 (__a
, __b
, __imm
, __p
);
28805 __extension__
extern __inline uint16x8_t
28806 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28807 __arm_vqshruntq_m (uint16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28809 return __arm_vqshruntq_m_n_s32 (__a
, __b
, __imm
, __p
);
28812 __extension__
extern __inline uint8x16_t
28813 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28814 __arm_vqshruntq_m (uint8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28816 return __arm_vqshruntq_m_n_s16 (__a
, __b
, __imm
, __p
);
28819 __extension__
extern __inline
int64_t
28820 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28821 __arm_vrmlaldavhaq_p (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
28823 return __arm_vrmlaldavhaq_p_s32 (__a
, __b
, __c
, __p
);
28826 __extension__
extern __inline
uint64_t
28827 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28828 __arm_vrmlaldavhaq_p (uint64_t __a
, uint32x4_t __b
, uint32x4_t __c
, mve_pred16_t __p
)
28830 return __arm_vrmlaldavhaq_p_u32 (__a
, __b
, __c
, __p
);
28833 __extension__
extern __inline
int64_t
28834 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28835 __arm_vrmlaldavhaxq_p (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
28837 return __arm_vrmlaldavhaxq_p_s32 (__a
, __b
, __c
, __p
);
28840 __extension__
extern __inline
int64_t
28841 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28842 __arm_vrmlsldavhaq_p (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
28844 return __arm_vrmlsldavhaq_p_s32 (__a
, __b
, __c
, __p
);
28847 __extension__
extern __inline
int64_t
28848 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28849 __arm_vrmlsldavhaxq_p (int64_t __a
, int32x4_t __b
, int32x4_t __c
, mve_pred16_t __p
)
28851 return __arm_vrmlsldavhaxq_p_s32 (__a
, __b
, __c
, __p
);
28854 __extension__
extern __inline int16x8_t
28855 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28856 __arm_vrshrnbq_m (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28858 return __arm_vrshrnbq_m_n_s32 (__a
, __b
, __imm
, __p
);
28861 __extension__
extern __inline int8x16_t
28862 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28863 __arm_vrshrnbq_m (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28865 return __arm_vrshrnbq_m_n_s16 (__a
, __b
, __imm
, __p
);
28868 __extension__
extern __inline uint16x8_t
28869 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28870 __arm_vrshrnbq_m (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28872 return __arm_vrshrnbq_m_n_u32 (__a
, __b
, __imm
, __p
);
28875 __extension__
extern __inline uint8x16_t
28876 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28877 __arm_vrshrnbq_m (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28879 return __arm_vrshrnbq_m_n_u16 (__a
, __b
, __imm
, __p
);
28882 __extension__
extern __inline int16x8_t
28883 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28884 __arm_vrshrntq_m (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28886 return __arm_vrshrntq_m_n_s32 (__a
, __b
, __imm
, __p
);
28889 __extension__
extern __inline int8x16_t
28890 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28891 __arm_vrshrntq_m (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28893 return __arm_vrshrntq_m_n_s16 (__a
, __b
, __imm
, __p
);
28896 __extension__
extern __inline uint16x8_t
28897 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28898 __arm_vrshrntq_m (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28900 return __arm_vrshrntq_m_n_u32 (__a
, __b
, __imm
, __p
);
28903 __extension__
extern __inline uint8x16_t
28904 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28905 __arm_vrshrntq_m (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28907 return __arm_vrshrntq_m_n_u16 (__a
, __b
, __imm
, __p
);
28910 __extension__
extern __inline int16x8_t
28911 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28912 __arm_vshllbq_m (int16x8_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
28914 return __arm_vshllbq_m_n_s8 (__inactive
, __a
, __imm
, __p
);
28917 __extension__
extern __inline int32x4_t
28918 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28919 __arm_vshllbq_m (int32x4_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
28921 return __arm_vshllbq_m_n_s16 (__inactive
, __a
, __imm
, __p
);
28924 __extension__
extern __inline uint16x8_t
28925 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28926 __arm_vshllbq_m (uint16x8_t __inactive
, uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
28928 return __arm_vshllbq_m_n_u8 (__inactive
, __a
, __imm
, __p
);
28931 __extension__
extern __inline uint32x4_t
28932 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28933 __arm_vshllbq_m (uint32x4_t __inactive
, uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
28935 return __arm_vshllbq_m_n_u16 (__inactive
, __a
, __imm
, __p
);
28938 __extension__
extern __inline int16x8_t
28939 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28940 __arm_vshlltq_m (int16x8_t __inactive
, int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
28942 return __arm_vshlltq_m_n_s8 (__inactive
, __a
, __imm
, __p
);
28945 __extension__
extern __inline int32x4_t
28946 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28947 __arm_vshlltq_m (int32x4_t __inactive
, int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
28949 return __arm_vshlltq_m_n_s16 (__inactive
, __a
, __imm
, __p
);
28952 __extension__
extern __inline uint16x8_t
28953 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28954 __arm_vshlltq_m (uint16x8_t __inactive
, uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
28956 return __arm_vshlltq_m_n_u8 (__inactive
, __a
, __imm
, __p
);
28959 __extension__
extern __inline uint32x4_t
28960 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28961 __arm_vshlltq_m (uint32x4_t __inactive
, uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
28963 return __arm_vshlltq_m_n_u16 (__inactive
, __a
, __imm
, __p
);
28966 __extension__
extern __inline int16x8_t
28967 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28968 __arm_vshrnbq_m (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28970 return __arm_vshrnbq_m_n_s32 (__a
, __b
, __imm
, __p
);
28973 __extension__
extern __inline int8x16_t
28974 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28975 __arm_vshrnbq_m (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28977 return __arm_vshrnbq_m_n_s16 (__a
, __b
, __imm
, __p
);
28980 __extension__
extern __inline uint16x8_t
28981 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28982 __arm_vshrnbq_m (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28984 return __arm_vshrnbq_m_n_u32 (__a
, __b
, __imm
, __p
);
28987 __extension__
extern __inline uint8x16_t
28988 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28989 __arm_vshrnbq_m (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
28991 return __arm_vshrnbq_m_n_u16 (__a
, __b
, __imm
, __p
);
28994 __extension__
extern __inline int16x8_t
28995 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
28996 __arm_vshrntq_m (int16x8_t __a
, int32x4_t __b
, const int __imm
, mve_pred16_t __p
)
28998 return __arm_vshrntq_m_n_s32 (__a
, __b
, __imm
, __p
);
29001 __extension__
extern __inline int8x16_t
29002 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29003 __arm_vshrntq_m (int8x16_t __a
, int16x8_t __b
, const int __imm
, mve_pred16_t __p
)
29005 return __arm_vshrntq_m_n_s16 (__a
, __b
, __imm
, __p
);
29008 __extension__
extern __inline uint16x8_t
29009 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29010 __arm_vshrntq_m (uint16x8_t __a
, uint32x4_t __b
, const int __imm
, mve_pred16_t __p
)
29012 return __arm_vshrntq_m_n_u32 (__a
, __b
, __imm
, __p
);
29015 __extension__
extern __inline uint8x16_t
29016 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29017 __arm_vshrntq_m (uint8x16_t __a
, uint16x8_t __b
, const int __imm
, mve_pred16_t __p
)
29019 return __arm_vshrntq_m_n_u16 (__a
, __b
, __imm
, __p
);
29022 __extension__
extern __inline
void
29023 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29024 __arm_vstrbq_scatter_offset (int8_t * __base
, uint8x16_t __offset
, int8x16_t __value
)
29026 __arm_vstrbq_scatter_offset_s8 (__base
, __offset
, __value
);
29029 __extension__
extern __inline
void
29030 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29031 __arm_vstrbq_scatter_offset (int8_t * __base
, uint32x4_t __offset
, int32x4_t __value
)
29033 __arm_vstrbq_scatter_offset_s32 (__base
, __offset
, __value
);
29036 __extension__
extern __inline
void
29037 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29038 __arm_vstrbq_scatter_offset (int8_t * __base
, uint16x8_t __offset
, int16x8_t __value
)
29040 __arm_vstrbq_scatter_offset_s16 (__base
, __offset
, __value
);
29043 __extension__
extern __inline
void
29044 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29045 __arm_vstrbq_scatter_offset (uint8_t * __base
, uint8x16_t __offset
, uint8x16_t __value
)
29047 __arm_vstrbq_scatter_offset_u8 (__base
, __offset
, __value
);
29050 __extension__
extern __inline
void
29051 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29052 __arm_vstrbq_scatter_offset (uint8_t * __base
, uint32x4_t __offset
, uint32x4_t __value
)
29054 __arm_vstrbq_scatter_offset_u32 (__base
, __offset
, __value
);
29057 __extension__
extern __inline
void
29058 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29059 __arm_vstrbq_scatter_offset (uint8_t * __base
, uint16x8_t __offset
, uint16x8_t __value
)
29061 __arm_vstrbq_scatter_offset_u16 (__base
, __offset
, __value
);
29064 __extension__
extern __inline
void
29065 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29066 __arm_vstrbq (int8_t * __addr
, int8x16_t __value
)
29068 __arm_vstrbq_s8 (__addr
, __value
);
29071 __extension__
extern __inline
void
29072 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29073 __arm_vstrbq (int8_t * __addr
, int32x4_t __value
)
29075 __arm_vstrbq_s32 (__addr
, __value
);
29078 __extension__
extern __inline
void
29079 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29080 __arm_vstrbq (int8_t * __addr
, int16x8_t __value
)
29082 __arm_vstrbq_s16 (__addr
, __value
);
29085 __extension__
extern __inline
void
29086 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29087 __arm_vstrbq (uint8_t * __addr
, uint8x16_t __value
)
29089 __arm_vstrbq_u8 (__addr
, __value
);
29092 __extension__
extern __inline
void
29093 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29094 __arm_vstrbq (uint8_t * __addr
, uint32x4_t __value
)
29096 __arm_vstrbq_u32 (__addr
, __value
);
29099 __extension__
extern __inline
void
29100 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29101 __arm_vstrbq (uint8_t * __addr
, uint16x8_t __value
)
29103 __arm_vstrbq_u16 (__addr
, __value
);
29106 __extension__
extern __inline
void
29107 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29108 __arm_vstrwq_scatter_base (uint32x4_t __addr
, const int __offset
, int32x4_t __value
)
29110 __arm_vstrwq_scatter_base_s32 (__addr
, __offset
, __value
);
29113 __extension__
extern __inline
void
29114 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29115 __arm_vstrwq_scatter_base (uint32x4_t __addr
, const int __offset
, uint32x4_t __value
)
29117 __arm_vstrwq_scatter_base_u32 (__addr
, __offset
, __value
);
29120 __extension__
extern __inline uint8x16_t
29121 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29122 __arm_vldrbq_gather_offset (uint8_t const * __base
, uint8x16_t __offset
)
29124 return __arm_vldrbq_gather_offset_u8 (__base
, __offset
);
29127 __extension__
extern __inline int8x16_t
29128 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29129 __arm_vldrbq_gather_offset (int8_t const * __base
, uint8x16_t __offset
)
29131 return __arm_vldrbq_gather_offset_s8 (__base
, __offset
);
29134 __extension__
extern __inline uint16x8_t
29135 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29136 __arm_vldrbq_gather_offset (uint8_t const * __base
, uint16x8_t __offset
)
29138 return __arm_vldrbq_gather_offset_u16 (__base
, __offset
);
29141 __extension__
extern __inline int16x8_t
29142 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29143 __arm_vldrbq_gather_offset (int8_t const * __base
, uint16x8_t __offset
)
29145 return __arm_vldrbq_gather_offset_s16 (__base
, __offset
);
29148 __extension__
extern __inline uint32x4_t
29149 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29150 __arm_vldrbq_gather_offset (uint8_t const * __base
, uint32x4_t __offset
)
29152 return __arm_vldrbq_gather_offset_u32 (__base
, __offset
);
29155 __extension__
extern __inline int32x4_t
29156 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29157 __arm_vldrbq_gather_offset (int8_t const * __base
, uint32x4_t __offset
)
29159 return __arm_vldrbq_gather_offset_s32 (__base
, __offset
);
29162 __extension__
extern __inline
void
29163 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29164 __arm_vstrbq_p (int8_t * __addr
, int8x16_t __value
, mve_pred16_t __p
)
29166 __arm_vstrbq_p_s8 (__addr
, __value
, __p
);
29169 __extension__
extern __inline
void
29170 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29171 __arm_vstrbq_p (int8_t * __addr
, int32x4_t __value
, mve_pred16_t __p
)
29173 __arm_vstrbq_p_s32 (__addr
, __value
, __p
);
29176 __extension__
extern __inline
void
29177 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29178 __arm_vstrbq_p (int8_t * __addr
, int16x8_t __value
, mve_pred16_t __p
)
29180 __arm_vstrbq_p_s16 (__addr
, __value
, __p
);
29183 __extension__
extern __inline
void
29184 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29185 __arm_vstrbq_p (uint8_t * __addr
, uint8x16_t __value
, mve_pred16_t __p
)
29187 __arm_vstrbq_p_u8 (__addr
, __value
, __p
);
29190 __extension__
extern __inline
void
29191 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29192 __arm_vstrbq_p (uint8_t * __addr
, uint32x4_t __value
, mve_pred16_t __p
)
29194 __arm_vstrbq_p_u32 (__addr
, __value
, __p
);
29197 __extension__
extern __inline
void
29198 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29199 __arm_vstrbq_p (uint8_t * __addr
, uint16x8_t __value
, mve_pred16_t __p
)
29201 __arm_vstrbq_p_u16 (__addr
, __value
, __p
);
29204 __extension__
extern __inline
void
29205 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29206 __arm_vstrbq_scatter_offset_p (int8_t * __base
, uint8x16_t __offset
, int8x16_t __value
, mve_pred16_t __p
)
29208 __arm_vstrbq_scatter_offset_p_s8 (__base
, __offset
, __value
, __p
);
29211 __extension__
extern __inline
void
29212 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29213 __arm_vstrbq_scatter_offset_p (int8_t * __base
, uint32x4_t __offset
, int32x4_t __value
, mve_pred16_t __p
)
29215 __arm_vstrbq_scatter_offset_p_s32 (__base
, __offset
, __value
, __p
);
29218 __extension__
extern __inline
void
29219 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29220 __arm_vstrbq_scatter_offset_p (int8_t * __base
, uint16x8_t __offset
, int16x8_t __value
, mve_pred16_t __p
)
29222 __arm_vstrbq_scatter_offset_p_s16 (__base
, __offset
, __value
, __p
);
29225 __extension__
extern __inline
void
29226 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29227 __arm_vstrbq_scatter_offset_p (uint8_t * __base
, uint8x16_t __offset
, uint8x16_t __value
, mve_pred16_t __p
)
29229 __arm_vstrbq_scatter_offset_p_u8 (__base
, __offset
, __value
, __p
);
29232 __extension__
extern __inline
void
29233 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29234 __arm_vstrbq_scatter_offset_p (uint8_t * __base
, uint32x4_t __offset
, uint32x4_t __value
, mve_pred16_t __p
)
29236 __arm_vstrbq_scatter_offset_p_u32 (__base
, __offset
, __value
, __p
);
29239 __extension__
extern __inline
void
29240 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29241 __arm_vstrbq_scatter_offset_p (uint8_t * __base
, uint16x8_t __offset
, uint16x8_t __value
, mve_pred16_t __p
)
29243 __arm_vstrbq_scatter_offset_p_u16 (__base
, __offset
, __value
, __p
);
29246 __extension__
extern __inline
void
29247 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29248 __arm_vstrwq_scatter_base_p (uint32x4_t __addr
, const int __offset
, int32x4_t __value
, mve_pred16_t __p
)
29250 __arm_vstrwq_scatter_base_p_s32 (__addr
, __offset
, __value
, __p
);
29253 __extension__
extern __inline
void
29254 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29255 __arm_vstrwq_scatter_base_p (uint32x4_t __addr
, const int __offset
, uint32x4_t __value
, mve_pred16_t __p
)
29257 __arm_vstrwq_scatter_base_p_u32 (__addr
, __offset
, __value
, __p
);
29260 __extension__
extern __inline int8x16_t
29261 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29262 __arm_vldrbq_gather_offset_z (int8_t const * __base
, uint8x16_t __offset
, mve_pred16_t __p
)
29264 return __arm_vldrbq_gather_offset_z_s8 (__base
, __offset
, __p
);
29267 __extension__
extern __inline int32x4_t
29268 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29269 __arm_vldrbq_gather_offset_z (int8_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
29271 return __arm_vldrbq_gather_offset_z_s32 (__base
, __offset
, __p
);
29274 __extension__
extern __inline int16x8_t
29275 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29276 __arm_vldrbq_gather_offset_z (int8_t const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
29278 return __arm_vldrbq_gather_offset_z_s16 (__base
, __offset
, __p
);
29281 __extension__
extern __inline uint8x16_t
29282 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29283 __arm_vldrbq_gather_offset_z (uint8_t const * __base
, uint8x16_t __offset
, mve_pred16_t __p
)
29285 return __arm_vldrbq_gather_offset_z_u8 (__base
, __offset
, __p
);
29288 __extension__
extern __inline uint32x4_t
29289 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29290 __arm_vldrbq_gather_offset_z (uint8_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
29292 return __arm_vldrbq_gather_offset_z_u32 (__base
, __offset
, __p
);
29295 __extension__
extern __inline uint16x8_t
29296 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29297 __arm_vldrbq_gather_offset_z (uint8_t const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
29299 return __arm_vldrbq_gather_offset_z_u16 (__base
, __offset
, __p
);
29302 __extension__
extern __inline int8x16_t
29303 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29304 __arm_vld1q (int8_t const * __base
)
29306 return __arm_vld1q_s8 (__base
);
29309 __extension__
extern __inline int32x4_t
29310 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29311 __arm_vld1q (int32_t const * __base
)
29313 return __arm_vld1q_s32 (__base
);
29316 __extension__
extern __inline int16x8_t
29317 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29318 __arm_vld1q (int16_t const * __base
)
29320 return __arm_vld1q_s16 (__base
);
29323 __extension__
extern __inline uint8x16_t
29324 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29325 __arm_vld1q (uint8_t const * __base
)
29327 return __arm_vld1q_u8 (__base
);
29330 __extension__
extern __inline uint32x4_t
29331 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29332 __arm_vld1q (uint32_t const * __base
)
29334 return __arm_vld1q_u32 (__base
);
29337 __extension__
extern __inline uint16x8_t
29338 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29339 __arm_vld1q (uint16_t const * __base
)
29341 return __arm_vld1q_u16 (__base
);
29344 __extension__
extern __inline int32x4_t
29345 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29346 __arm_vldrhq_gather_offset (int16_t const * __base
, uint32x4_t __offset
)
29348 return __arm_vldrhq_gather_offset_s32 (__base
, __offset
);
29351 __extension__
extern __inline int16x8_t
29352 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29353 __arm_vldrhq_gather_offset (int16_t const * __base
, uint16x8_t __offset
)
29355 return __arm_vldrhq_gather_offset_s16 (__base
, __offset
);
29358 __extension__
extern __inline uint32x4_t
29359 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29360 __arm_vldrhq_gather_offset (uint16_t const * __base
, uint32x4_t __offset
)
29362 return __arm_vldrhq_gather_offset_u32 (__base
, __offset
);
29365 __extension__
extern __inline uint16x8_t
29366 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29367 __arm_vldrhq_gather_offset (uint16_t const * __base
, uint16x8_t __offset
)
29369 return __arm_vldrhq_gather_offset_u16 (__base
, __offset
);
29372 __extension__
extern __inline int32x4_t
29373 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29374 __arm_vldrhq_gather_offset_z (int16_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
29376 return __arm_vldrhq_gather_offset_z_s32 (__base
, __offset
, __p
);
29379 __extension__
extern __inline int16x8_t
29380 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29381 __arm_vldrhq_gather_offset_z (int16_t const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
29383 return __arm_vldrhq_gather_offset_z_s16 (__base
, __offset
, __p
);
29386 __extension__
extern __inline uint32x4_t
29387 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29388 __arm_vldrhq_gather_offset_z (uint16_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
29390 return __arm_vldrhq_gather_offset_z_u32 (__base
, __offset
, __p
);
29393 __extension__
extern __inline uint16x8_t
29394 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29395 __arm_vldrhq_gather_offset_z (uint16_t const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
29397 return __arm_vldrhq_gather_offset_z_u16 (__base
, __offset
, __p
);
29400 __extension__
extern __inline int32x4_t
29401 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29402 __arm_vldrhq_gather_shifted_offset (int16_t const * __base
, uint32x4_t __offset
)
29404 return __arm_vldrhq_gather_shifted_offset_s32 (__base
, __offset
);
29407 __extension__
extern __inline int16x8_t
29408 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29409 __arm_vldrhq_gather_shifted_offset (int16_t const * __base
, uint16x8_t __offset
)
29411 return __arm_vldrhq_gather_shifted_offset_s16 (__base
, __offset
);
29414 __extension__
extern __inline uint32x4_t
29415 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29416 __arm_vldrhq_gather_shifted_offset (uint16_t const * __base
, uint32x4_t __offset
)
29418 return __arm_vldrhq_gather_shifted_offset_u32 (__base
, __offset
);
29421 __extension__
extern __inline uint16x8_t
29422 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29423 __arm_vldrhq_gather_shifted_offset (uint16_t const * __base
, uint16x8_t __offset
)
29425 return __arm_vldrhq_gather_shifted_offset_u16 (__base
, __offset
);
29428 __extension__
extern __inline int32x4_t
29429 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29430 __arm_vldrhq_gather_shifted_offset_z (int16_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
29432 return __arm_vldrhq_gather_shifted_offset_z_s32 (__base
, __offset
, __p
);
29435 __extension__
extern __inline int16x8_t
29436 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29437 __arm_vldrhq_gather_shifted_offset_z (int16_t const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
29439 return __arm_vldrhq_gather_shifted_offset_z_s16 (__base
, __offset
, __p
);
29442 __extension__
extern __inline uint32x4_t
29443 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29444 __arm_vldrhq_gather_shifted_offset_z (uint16_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
29446 return __arm_vldrhq_gather_shifted_offset_z_u32 (__base
, __offset
, __p
);
29449 __extension__
extern __inline uint16x8_t
29450 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29451 __arm_vldrhq_gather_shifted_offset_z (uint16_t const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
29453 return __arm_vldrhq_gather_shifted_offset_z_u16 (__base
, __offset
, __p
);
29456 __extension__
extern __inline int64x2_t
29457 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29458 __arm_vldrdq_gather_offset (int64_t const * __base
, uint64x2_t __offset
)
29460 return __arm_vldrdq_gather_offset_s64 (__base
, __offset
);
29463 __extension__
extern __inline uint64x2_t
29464 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29465 __arm_vldrdq_gather_offset (uint64_t const * __base
, uint64x2_t __offset
)
29467 return __arm_vldrdq_gather_offset_u64 (__base
, __offset
);
29470 __extension__
extern __inline int64x2_t
29471 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29472 __arm_vldrdq_gather_offset_z (int64_t const * __base
, uint64x2_t __offset
, mve_pred16_t __p
)
29474 return __arm_vldrdq_gather_offset_z_s64 (__base
, __offset
, __p
);
29477 __extension__
extern __inline uint64x2_t
29478 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29479 __arm_vldrdq_gather_offset_z (uint64_t const * __base
, uint64x2_t __offset
, mve_pred16_t __p
)
29481 return __arm_vldrdq_gather_offset_z_u64 (__base
, __offset
, __p
);
29484 __extension__
extern __inline int64x2_t
29485 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29486 __arm_vldrdq_gather_shifted_offset (int64_t const * __base
, uint64x2_t __offset
)
29488 return __arm_vldrdq_gather_shifted_offset_s64 (__base
, __offset
);
29491 __extension__
extern __inline uint64x2_t
29492 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29493 __arm_vldrdq_gather_shifted_offset (uint64_t const * __base
, uint64x2_t __offset
)
29495 return __arm_vldrdq_gather_shifted_offset_u64 (__base
, __offset
);
29498 __extension__
extern __inline int64x2_t
29499 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29500 __arm_vldrdq_gather_shifted_offset_z (int64_t const * __base
, uint64x2_t __offset
, mve_pred16_t __p
)
29502 return __arm_vldrdq_gather_shifted_offset_z_s64 (__base
, __offset
, __p
);
29505 __extension__
extern __inline uint64x2_t
29506 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29507 __arm_vldrdq_gather_shifted_offset_z (uint64_t const * __base
, uint64x2_t __offset
, mve_pred16_t __p
)
29509 return __arm_vldrdq_gather_shifted_offset_z_u64 (__base
, __offset
, __p
);
29512 __extension__
extern __inline int32x4_t
29513 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29514 __arm_vldrwq_gather_offset (int32_t const * __base
, uint32x4_t __offset
)
29516 return __arm_vldrwq_gather_offset_s32 (__base
, __offset
);
29519 __extension__
extern __inline uint32x4_t
29520 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29521 __arm_vldrwq_gather_offset (uint32_t const * __base
, uint32x4_t __offset
)
29523 return __arm_vldrwq_gather_offset_u32 (__base
, __offset
);
29526 __extension__
extern __inline int32x4_t
29527 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29528 __arm_vldrwq_gather_offset_z (int32_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
29530 return __arm_vldrwq_gather_offset_z_s32 (__base
, __offset
, __p
);
29533 __extension__
extern __inline uint32x4_t
29534 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29535 __arm_vldrwq_gather_offset_z (uint32_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
29537 return __arm_vldrwq_gather_offset_z_u32 (__base
, __offset
, __p
);
29540 __extension__
extern __inline int32x4_t
29541 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29542 __arm_vldrwq_gather_shifted_offset (int32_t const * __base
, uint32x4_t __offset
)
29544 return __arm_vldrwq_gather_shifted_offset_s32 (__base
, __offset
);
29547 __extension__
extern __inline uint32x4_t
29548 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29549 __arm_vldrwq_gather_shifted_offset (uint32_t const * __base
, uint32x4_t __offset
)
29551 return __arm_vldrwq_gather_shifted_offset_u32 (__base
, __offset
);
29554 __extension__
extern __inline int32x4_t
29555 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29556 __arm_vldrwq_gather_shifted_offset_z (int32_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
29558 return __arm_vldrwq_gather_shifted_offset_z_s32 (__base
, __offset
, __p
);
29561 __extension__
extern __inline uint32x4_t
29562 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29563 __arm_vldrwq_gather_shifted_offset_z (uint32_t const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
29565 return __arm_vldrwq_gather_shifted_offset_z_u32 (__base
, __offset
, __p
);
29568 __extension__
extern __inline
void
29569 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29570 __arm_vst1q (int8_t * __addr
, int8x16_t __value
)
29572 __arm_vst1q_s8 (__addr
, __value
);
29575 __extension__
extern __inline
void
29576 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29577 __arm_vst1q (int32_t * __addr
, int32x4_t __value
)
29579 __arm_vst1q_s32 (__addr
, __value
);
29582 __extension__
extern __inline
void
29583 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29584 __arm_vst1q (int16_t * __addr
, int16x8_t __value
)
29586 __arm_vst1q_s16 (__addr
, __value
);
29589 __extension__
extern __inline
void
29590 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29591 __arm_vst1q (uint8_t * __addr
, uint8x16_t __value
)
29593 __arm_vst1q_u8 (__addr
, __value
);
29596 __extension__
extern __inline
void
29597 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29598 __arm_vst1q (uint32_t * __addr
, uint32x4_t __value
)
29600 __arm_vst1q_u32 (__addr
, __value
);
29603 __extension__
extern __inline
void
29604 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29605 __arm_vst1q (uint16_t * __addr
, uint16x8_t __value
)
29607 __arm_vst1q_u16 (__addr
, __value
);
29610 __extension__
extern __inline
void
29611 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29612 __arm_vstrhq_scatter_offset (int16_t * __base
, uint32x4_t __offset
, int32x4_t __value
)
29614 __arm_vstrhq_scatter_offset_s32 (__base
, __offset
, __value
);
29617 __extension__
extern __inline
void
29618 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29619 __arm_vstrhq_scatter_offset (int16_t * __base
, uint16x8_t __offset
, int16x8_t __value
)
29621 __arm_vstrhq_scatter_offset_s16 (__base
, __offset
, __value
);
29624 __extension__
extern __inline
void
29625 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29626 __arm_vstrhq_scatter_offset (uint16_t * __base
, uint32x4_t __offset
, uint32x4_t __value
)
29628 __arm_vstrhq_scatter_offset_u32 (__base
, __offset
, __value
);
29631 __extension__
extern __inline
void
29632 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29633 __arm_vstrhq_scatter_offset (uint16_t * __base
, uint16x8_t __offset
, uint16x8_t __value
)
29635 __arm_vstrhq_scatter_offset_u16 (__base
, __offset
, __value
);
29638 __extension__
extern __inline
void
29639 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29640 __arm_vstrhq_scatter_offset_p (int16_t * __base
, uint32x4_t __offset
, int32x4_t __value
, mve_pred16_t __p
)
29642 __arm_vstrhq_scatter_offset_p_s32 (__base
, __offset
, __value
, __p
);
29645 __extension__
extern __inline
void
29646 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29647 __arm_vstrhq_scatter_offset_p (int16_t * __base
, uint16x8_t __offset
, int16x8_t __value
, mve_pred16_t __p
)
29649 __arm_vstrhq_scatter_offset_p_s16 (__base
, __offset
, __value
, __p
);
29652 __extension__
extern __inline
void
29653 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29654 __arm_vstrhq_scatter_offset_p (uint16_t * __base
, uint32x4_t __offset
, uint32x4_t __value
, mve_pred16_t __p
)
29656 __arm_vstrhq_scatter_offset_p_u32 (__base
, __offset
, __value
, __p
);
29659 __extension__
extern __inline
void
29660 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29661 __arm_vstrhq_scatter_offset_p (uint16_t * __base
, uint16x8_t __offset
, uint16x8_t __value
, mve_pred16_t __p
)
29663 __arm_vstrhq_scatter_offset_p_u16 (__base
, __offset
, __value
, __p
);
29666 __extension__
extern __inline
void
29667 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29668 __arm_vstrhq_scatter_shifted_offset (int16_t * __base
, uint32x4_t __offset
, int32x4_t __value
)
29670 __arm_vstrhq_scatter_shifted_offset_s32 (__base
, __offset
, __value
);
29673 __extension__
extern __inline
void
29674 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29675 __arm_vstrhq_scatter_shifted_offset (int16_t * __base
, uint16x8_t __offset
, int16x8_t __value
)
29677 __arm_vstrhq_scatter_shifted_offset_s16 (__base
, __offset
, __value
);
29680 __extension__
extern __inline
void
29681 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29682 __arm_vstrhq_scatter_shifted_offset (uint16_t * __base
, uint32x4_t __offset
, uint32x4_t __value
)
29684 __arm_vstrhq_scatter_shifted_offset_u32 (__base
, __offset
, __value
);
29687 __extension__
extern __inline
void
29688 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29689 __arm_vstrhq_scatter_shifted_offset (uint16_t * __base
, uint16x8_t __offset
, uint16x8_t __value
)
29691 __arm_vstrhq_scatter_shifted_offset_u16 (__base
, __offset
, __value
);
29694 __extension__
extern __inline
void
29695 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29696 __arm_vstrhq_scatter_shifted_offset_p (int16_t * __base
, uint32x4_t __offset
, int32x4_t __value
, mve_pred16_t __p
)
29698 __arm_vstrhq_scatter_shifted_offset_p_s32 (__base
, __offset
, __value
, __p
);
29701 __extension__
extern __inline
void
29702 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29703 __arm_vstrhq_scatter_shifted_offset_p (int16_t * __base
, uint16x8_t __offset
, int16x8_t __value
, mve_pred16_t __p
)
29705 __arm_vstrhq_scatter_shifted_offset_p_s16 (__base
, __offset
, __value
, __p
);
29708 __extension__
extern __inline
void
29709 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29710 __arm_vstrhq_scatter_shifted_offset_p (uint16_t * __base
, uint32x4_t __offset
, uint32x4_t __value
, mve_pred16_t __p
)
29712 __arm_vstrhq_scatter_shifted_offset_p_u32 (__base
, __offset
, __value
, __p
);
29715 __extension__
extern __inline
void
29716 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29717 __arm_vstrhq_scatter_shifted_offset_p (uint16_t * __base
, uint16x8_t __offset
, uint16x8_t __value
, mve_pred16_t __p
)
29719 __arm_vstrhq_scatter_shifted_offset_p_u16 (__base
, __offset
, __value
, __p
);
29722 __extension__
extern __inline
void
29723 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29724 __arm_vstrhq (int16_t * __addr
, int32x4_t __value
)
29726 __arm_vstrhq_s32 (__addr
, __value
);
29729 __extension__
extern __inline
void
29730 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29731 __arm_vstrhq (int16_t * __addr
, int16x8_t __value
)
29733 __arm_vstrhq_s16 (__addr
, __value
);
29736 __extension__
extern __inline
void
29737 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29738 __arm_vstrhq (uint16_t * __addr
, uint32x4_t __value
)
29740 __arm_vstrhq_u32 (__addr
, __value
);
29743 __extension__
extern __inline
void
29744 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29745 __arm_vstrhq (uint16_t * __addr
, uint16x8_t __value
)
29747 __arm_vstrhq_u16 (__addr
, __value
);
29750 __extension__
extern __inline
void
29751 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29752 __arm_vstrhq_p (int16_t * __addr
, int32x4_t __value
, mve_pred16_t __p
)
29754 __arm_vstrhq_p_s32 (__addr
, __value
, __p
);
29757 __extension__
extern __inline
void
29758 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29759 __arm_vstrhq_p (int16_t * __addr
, int16x8_t __value
, mve_pred16_t __p
)
29761 __arm_vstrhq_p_s16 (__addr
, __value
, __p
);
29764 __extension__
extern __inline
void
29765 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29766 __arm_vstrhq_p (uint16_t * __addr
, uint32x4_t __value
, mve_pred16_t __p
)
29768 __arm_vstrhq_p_u32 (__addr
, __value
, __p
);
29771 __extension__
extern __inline
void
29772 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29773 __arm_vstrhq_p (uint16_t * __addr
, uint16x8_t __value
, mve_pred16_t __p
)
29775 __arm_vstrhq_p_u16 (__addr
, __value
, __p
);
29778 __extension__
extern __inline
void
29779 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29780 __arm_vstrwq (int32_t * __addr
, int32x4_t __value
)
29782 __arm_vstrwq_s32 (__addr
, __value
);
29785 __extension__
extern __inline
void
29786 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29787 __arm_vstrwq (uint32_t * __addr
, uint32x4_t __value
)
29789 __arm_vstrwq_u32 (__addr
, __value
);
29792 __extension__
extern __inline
void
29793 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29794 __arm_vstrwq_p (int32_t * __addr
, int32x4_t __value
, mve_pred16_t __p
)
29796 __arm_vstrwq_p_s32 (__addr
, __value
, __p
);
29799 __extension__
extern __inline
void
29800 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29801 __arm_vstrwq_p (uint32_t * __addr
, uint32x4_t __value
, mve_pred16_t __p
)
29803 __arm_vstrwq_p_u32 (__addr
, __value
, __p
);
29806 __extension__
extern __inline
void
29807 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29808 __arm_vstrdq_scatter_base_p (uint64x2_t __addr
, const int __offset
, int64x2_t __value
, mve_pred16_t __p
)
29810 __arm_vstrdq_scatter_base_p_s64 (__addr
, __offset
, __value
, __p
);
29813 __extension__
extern __inline
void
29814 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29815 __arm_vstrdq_scatter_base_p (uint64x2_t __addr
, const int __offset
, uint64x2_t __value
, mve_pred16_t __p
)
29817 __arm_vstrdq_scatter_base_p_u64 (__addr
, __offset
, __value
, __p
);
29820 __extension__
extern __inline
void
29821 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29822 __arm_vstrdq_scatter_base (uint64x2_t __addr
, const int __offset
, int64x2_t __value
)
29824 __arm_vstrdq_scatter_base_s64 (__addr
, __offset
, __value
);
29827 __extension__
extern __inline
void
29828 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29829 __arm_vstrdq_scatter_base (uint64x2_t __addr
, const int __offset
, uint64x2_t __value
)
29831 __arm_vstrdq_scatter_base_u64 (__addr
, __offset
, __value
);
29834 __extension__
extern __inline
void
29835 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29836 __arm_vstrdq_scatter_offset_p (int64_t * __base
, uint64x2_t __offset
, int64x2_t __value
, mve_pred16_t __p
)
29838 __arm_vstrdq_scatter_offset_p_s64 (__base
, __offset
, __value
, __p
);
29841 __extension__
extern __inline
void
29842 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29843 __arm_vstrdq_scatter_offset_p (uint64_t * __base
, uint64x2_t __offset
, uint64x2_t __value
, mve_pred16_t __p
)
29845 __arm_vstrdq_scatter_offset_p_u64 (__base
, __offset
, __value
, __p
);
29848 __extension__
extern __inline
void
29849 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29850 __arm_vstrdq_scatter_offset (int64_t * __base
, uint64x2_t __offset
, int64x2_t __value
)
29852 __arm_vstrdq_scatter_offset_s64 (__base
, __offset
, __value
);
29855 __extension__
extern __inline
void
29856 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29857 __arm_vstrdq_scatter_offset (uint64_t * __base
, uint64x2_t __offset
, uint64x2_t __value
)
29859 __arm_vstrdq_scatter_offset_u64 (__base
, __offset
, __value
);
29862 __extension__
extern __inline
void
29863 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29864 __arm_vstrdq_scatter_shifted_offset_p (int64_t * __base
, uint64x2_t __offset
, int64x2_t __value
, mve_pred16_t __p
)
29866 __arm_vstrdq_scatter_shifted_offset_p_s64 (__base
, __offset
, __value
, __p
);
29869 __extension__
extern __inline
void
29870 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29871 __arm_vstrdq_scatter_shifted_offset_p (uint64_t * __base
, uint64x2_t __offset
, uint64x2_t __value
, mve_pred16_t __p
)
29873 __arm_vstrdq_scatter_shifted_offset_p_u64 (__base
, __offset
, __value
, __p
);
29876 __extension__
extern __inline
void
29877 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29878 __arm_vstrdq_scatter_shifted_offset (int64_t * __base
, uint64x2_t __offset
, int64x2_t __value
)
29880 __arm_vstrdq_scatter_shifted_offset_s64 (__base
, __offset
, __value
);
29883 __extension__
extern __inline
void
29884 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29885 __arm_vstrdq_scatter_shifted_offset (uint64_t * __base
, uint64x2_t __offset
, uint64x2_t __value
)
29887 __arm_vstrdq_scatter_shifted_offset_u64 (__base
, __offset
, __value
);
29890 __extension__
extern __inline
void
29891 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29892 __arm_vstrwq_scatter_offset_p (int32_t * __base
, uint32x4_t __offset
, int32x4_t __value
, mve_pred16_t __p
)
29894 __arm_vstrwq_scatter_offset_p_s32 (__base
, __offset
, __value
, __p
);
29897 __extension__
extern __inline
void
29898 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29899 __arm_vstrwq_scatter_offset_p (uint32_t * __base
, uint32x4_t __offset
, uint32x4_t __value
, mve_pred16_t __p
)
29901 __arm_vstrwq_scatter_offset_p_u32 (__base
, __offset
, __value
, __p
);
29904 __extension__
extern __inline
void
29905 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29906 __arm_vstrwq_scatter_offset (int32_t * __base
, uint32x4_t __offset
, int32x4_t __value
)
29908 __arm_vstrwq_scatter_offset_s32 (__base
, __offset
, __value
);
29911 __extension__
extern __inline
void
29912 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29913 __arm_vstrwq_scatter_offset (uint32_t * __base
, uint32x4_t __offset
, uint32x4_t __value
)
29915 __arm_vstrwq_scatter_offset_u32 (__base
, __offset
, __value
);
29918 __extension__
extern __inline
void
29919 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29920 __arm_vstrwq_scatter_shifted_offset_p (int32_t * __base
, uint32x4_t __offset
, int32x4_t __value
, mve_pred16_t __p
)
29922 __arm_vstrwq_scatter_shifted_offset_p_s32 (__base
, __offset
, __value
, __p
);
29925 __extension__
extern __inline
void
29926 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29927 __arm_vstrwq_scatter_shifted_offset_p (uint32_t * __base
, uint32x4_t __offset
, uint32x4_t __value
, mve_pred16_t __p
)
29929 __arm_vstrwq_scatter_shifted_offset_p_u32 (__base
, __offset
, __value
, __p
);
29932 __extension__
extern __inline
void
29933 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29934 __arm_vstrwq_scatter_shifted_offset (int32_t * __base
, uint32x4_t __offset
, int32x4_t __value
)
29936 __arm_vstrwq_scatter_shifted_offset_s32 (__base
, __offset
, __value
);
29939 __extension__
extern __inline
void
29940 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29941 __arm_vstrwq_scatter_shifted_offset (uint32_t * __base
, uint32x4_t __offset
, uint32x4_t __value
)
29943 __arm_vstrwq_scatter_shifted_offset_u32 (__base
, __offset
, __value
);
29946 __extension__
extern __inline int8x16_t
29947 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29948 __arm_vaddq (int8x16_t __a
, int8x16_t __b
)
29950 return __arm_vaddq_s8 (__a
, __b
);
29953 __extension__
extern __inline int16x8_t
29954 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29955 __arm_vaddq (int16x8_t __a
, int16x8_t __b
)
29957 return __arm_vaddq_s16 (__a
, __b
);
29960 __extension__
extern __inline int32x4_t
29961 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29962 __arm_vaddq (int32x4_t __a
, int32x4_t __b
)
29964 return __arm_vaddq_s32 (__a
, __b
);
29967 __extension__
extern __inline uint8x16_t
29968 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29969 __arm_vaddq (uint8x16_t __a
, uint8x16_t __b
)
29971 return __arm_vaddq_u8 (__a
, __b
);
29974 __extension__
extern __inline uint16x8_t
29975 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29976 __arm_vaddq (uint16x8_t __a
, uint16x8_t __b
)
29978 return __arm_vaddq_u16 (__a
, __b
);
29981 __extension__
extern __inline uint32x4_t
29982 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29983 __arm_vaddq (uint32x4_t __a
, uint32x4_t __b
)
29985 return __arm_vaddq_u32 (__a
, __b
);
29988 __extension__
extern __inline uint8x16_t
29989 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29990 __arm_vddupq_m (uint8x16_t __inactive
, uint32_t __a
, const int __imm
, mve_pred16_t __p
)
29992 return __arm_vddupq_m_n_u8 (__inactive
, __a
, __imm
, __p
);
29995 __extension__
extern __inline uint32x4_t
29996 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
29997 __arm_vddupq_m (uint32x4_t __inactive
, uint32_t __a
, const int __imm
, mve_pred16_t __p
)
29999 return __arm_vddupq_m_n_u32 (__inactive
, __a
, __imm
, __p
);
30002 __extension__
extern __inline uint16x8_t
30003 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30004 __arm_vddupq_m (uint16x8_t __inactive
, uint32_t __a
, const int __imm
, mve_pred16_t __p
)
30006 return __arm_vddupq_m_n_u16 (__inactive
, __a
, __imm
, __p
);
30009 __extension__
extern __inline uint8x16_t
30010 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30011 __arm_vddupq_m (uint8x16_t __inactive
, uint32_t * __a
, const int __imm
, mve_pred16_t __p
)
30013 return __arm_vddupq_m_wb_u8 (__inactive
, __a
, __imm
, __p
);
30016 __extension__
extern __inline uint16x8_t
30017 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30018 __arm_vddupq_m (uint16x8_t __inactive
, uint32_t * __a
, const int __imm
, mve_pred16_t __p
)
30020 return __arm_vddupq_m_wb_u16 (__inactive
, __a
, __imm
, __p
);
30023 __extension__
extern __inline uint32x4_t
30024 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30025 __arm_vddupq_m (uint32x4_t __inactive
, uint32_t * __a
, const int __imm
, mve_pred16_t __p
)
30027 return __arm_vddupq_m_wb_u32 (__inactive
, __a
, __imm
, __p
);
30030 __extension__
extern __inline uint8x16_t
30031 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30032 __arm_vddupq_u8 (uint32_t __a
, const int __imm
)
30034 return __arm_vddupq_n_u8 (__a
, __imm
);
30037 __extension__
extern __inline uint32x4_t
30038 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30039 __arm_vddupq_u32 (uint32_t __a
, const int __imm
)
30041 return __arm_vddupq_n_u32 (__a
, __imm
);
30044 __extension__
extern __inline uint16x8_t
30045 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30046 __arm_vddupq_u16 (uint32_t __a
, const int __imm
)
30048 return __arm_vddupq_n_u16 (__a
, __imm
);
30051 __extension__
extern __inline uint8x16_t
30052 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30053 __arm_vdwdupq_m (uint8x16_t __inactive
, uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30055 return __arm_vdwdupq_m_n_u8 (__inactive
, __a
, __b
, __imm
, __p
);
30058 __extension__
extern __inline uint32x4_t
30059 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30060 __arm_vdwdupq_m (uint32x4_t __inactive
, uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30062 return __arm_vdwdupq_m_n_u32 (__inactive
, __a
, __b
, __imm
, __p
);
30065 __extension__
extern __inline uint16x8_t
30066 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30067 __arm_vdwdupq_m (uint16x8_t __inactive
, uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30069 return __arm_vdwdupq_m_n_u16 (__inactive
, __a
, __b
, __imm
, __p
);
30072 __extension__
extern __inline uint8x16_t
30073 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30074 __arm_vdwdupq_m (uint8x16_t __inactive
, uint32_t * __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30076 return __arm_vdwdupq_m_wb_u8 (__inactive
, __a
, __b
, __imm
, __p
);
30079 __extension__
extern __inline uint32x4_t
30080 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30081 __arm_vdwdupq_m (uint32x4_t __inactive
, uint32_t * __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30083 return __arm_vdwdupq_m_wb_u32 (__inactive
, __a
, __b
, __imm
, __p
);
30086 __extension__
extern __inline uint16x8_t
30087 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30088 __arm_vdwdupq_m (uint16x8_t __inactive
, uint32_t * __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30090 return __arm_vdwdupq_m_wb_u16 (__inactive
, __a
, __b
, __imm
, __p
);
30093 __extension__
extern __inline uint8x16_t
30094 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30095 __arm_vdwdupq_u8 (uint32_t __a
, uint32_t __b
, const int __imm
)
30097 return __arm_vdwdupq_n_u8 (__a
, __b
, __imm
);
30100 __extension__
extern __inline uint32x4_t
30101 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30102 __arm_vdwdupq_u32 (uint32_t __a
, uint32_t __b
, const int __imm
)
30104 return __arm_vdwdupq_n_u32 (__a
, __b
, __imm
);
30107 __extension__
extern __inline uint16x8_t
30108 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30109 __arm_vdwdupq_u16 (uint32_t __a
, uint32_t __b
, const int __imm
)
30111 return __arm_vdwdupq_n_u16 (__a
, __b
, __imm
);
30114 __extension__
extern __inline uint8x16_t
30115 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30116 __arm_vdwdupq_u8 (uint32_t * __a
, uint32_t __b
, const int __imm
)
30118 return __arm_vdwdupq_wb_u8 (__a
, __b
, __imm
);
30121 __extension__
extern __inline uint32x4_t
30122 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30123 __arm_vdwdupq_u32 (uint32_t * __a
, uint32_t __b
, const int __imm
)
30125 return __arm_vdwdupq_wb_u32 (__a
, __b
, __imm
);
30128 __extension__
extern __inline uint16x8_t
30129 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30130 __arm_vdwdupq_u16 (uint32_t * __a
, uint32_t __b
, const int __imm
)
30132 return __arm_vdwdupq_wb_u16 (__a
, __b
, __imm
);
30135 __extension__
extern __inline uint8x16_t
30136 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30137 __arm_vidupq_m (uint8x16_t __inactive
, uint32_t __a
, const int __imm
, mve_pred16_t __p
)
30139 return __arm_vidupq_m_n_u8 (__inactive
, __a
, __imm
, __p
);
30142 __extension__
extern __inline uint32x4_t
30143 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30144 __arm_vidupq_m (uint32x4_t __inactive
, uint32_t __a
, const int __imm
, mve_pred16_t __p
)
30146 return __arm_vidupq_m_n_u32 (__inactive
, __a
, __imm
, __p
);
30149 __extension__
extern __inline uint16x8_t
30150 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30151 __arm_vidupq_m (uint16x8_t __inactive
, uint32_t __a
, const int __imm
, mve_pred16_t __p
)
30153 return __arm_vidupq_m_n_u16 (__inactive
, __a
, __imm
, __p
);
30156 __extension__
extern __inline uint8x16_t
30157 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30158 __arm_vidupq_u8 (uint32_t __a
, const int __imm
)
30160 return __arm_vidupq_n_u8 (__a
, __imm
);
30163 __extension__
extern __inline uint8x16_t
30164 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30165 __arm_vidupq_m (uint8x16_t __inactive
, uint32_t * __a
, const int __imm
, mve_pred16_t __p
)
30167 return __arm_vidupq_m_wb_u8 (__inactive
, __a
, __imm
, __p
);
30170 __extension__
extern __inline uint16x8_t
30171 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30172 __arm_vidupq_m (uint16x8_t __inactive
, uint32_t * __a
, const int __imm
, mve_pred16_t __p
)
30174 return __arm_vidupq_m_wb_u16 (__inactive
, __a
, __imm
, __p
);
30177 __extension__
extern __inline uint32x4_t
30178 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30179 __arm_vidupq_m (uint32x4_t __inactive
, uint32_t * __a
, const int __imm
, mve_pred16_t __p
)
30181 return __arm_vidupq_m_wb_u32 (__inactive
, __a
, __imm
, __p
);
30184 __extension__
extern __inline uint32x4_t
30185 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30186 __arm_vidupq_u32 (uint32_t __a
, const int __imm
)
30188 return __arm_vidupq_n_u32 (__a
, __imm
);
30191 __extension__
extern __inline uint16x8_t
30192 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30193 __arm_vidupq_u16 (uint32_t __a
, const int __imm
)
30195 return __arm_vidupq_n_u16 (__a
, __imm
);
30198 __extension__
extern __inline uint8x16_t
30199 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30200 __arm_vidupq_u8 (uint32_t * __a
, const int __imm
)
30202 return __arm_vidupq_wb_u8 (__a
, __imm
);
30205 __extension__
extern __inline uint16x8_t
30206 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30207 __arm_vidupq_u16 (uint32_t * __a
, const int __imm
)
30209 return __arm_vidupq_wb_u16 (__a
, __imm
);
30212 __extension__
extern __inline uint32x4_t
30213 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30214 __arm_vidupq_u32 (uint32_t * __a
, const int __imm
)
30216 return __arm_vidupq_wb_u32 (__a
, __imm
);
30219 __extension__
extern __inline uint8x16_t
30220 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30221 __arm_vddupq_u8 (uint32_t * __a
, const int __imm
)
30223 return __arm_vddupq_wb_u8 (__a
, __imm
);
30226 __extension__
extern __inline uint16x8_t
30227 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30228 __arm_vddupq_u16 (uint32_t * __a
, const int __imm
)
30230 return __arm_vddupq_wb_u16 (__a
, __imm
);
30233 __extension__
extern __inline uint32x4_t
30234 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30235 __arm_vddupq_u32 (uint32_t * __a
, const int __imm
)
30237 return __arm_vddupq_wb_u32 (__a
, __imm
);
30240 __extension__
extern __inline uint8x16_t
30241 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30242 __arm_viwdupq_m (uint8x16_t __inactive
, uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30244 return __arm_viwdupq_m_n_u8 (__inactive
, __a
, __b
, __imm
, __p
);
30247 __extension__
extern __inline uint32x4_t
30248 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30249 __arm_viwdupq_m (uint32x4_t __inactive
, uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30251 return __arm_viwdupq_m_n_u32 (__inactive
, __a
, __b
, __imm
, __p
);
30254 __extension__
extern __inline uint16x8_t
30255 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30256 __arm_viwdupq_m (uint16x8_t __inactive
, uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30258 return __arm_viwdupq_m_n_u16 (__inactive
, __a
, __b
, __imm
, __p
);
30261 __extension__
extern __inline uint8x16_t
30262 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30263 __arm_viwdupq_m (uint8x16_t __inactive
, uint32_t * __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30265 return __arm_viwdupq_m_wb_u8 (__inactive
, __a
, __b
, __imm
, __p
);
30268 __extension__
extern __inline uint32x4_t
30269 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30270 __arm_viwdupq_m (uint32x4_t __inactive
, uint32_t * __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30272 return __arm_viwdupq_m_wb_u32 (__inactive
, __a
, __b
, __imm
, __p
);
30275 __extension__
extern __inline uint16x8_t
30276 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30277 __arm_viwdupq_m (uint16x8_t __inactive
, uint32_t * __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30279 return __arm_viwdupq_m_wb_u16 (__inactive
, __a
, __b
, __imm
, __p
);
30282 __extension__
extern __inline uint8x16_t
30283 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30284 __arm_viwdupq_u8 (uint32_t __a
, uint32_t __b
, const int __imm
)
30286 return __arm_viwdupq_n_u8 (__a
, __b
, __imm
);
30289 __extension__
extern __inline uint32x4_t
30290 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30291 __arm_viwdupq_u32 (uint32_t __a
, uint32_t __b
, const int __imm
)
30293 return __arm_viwdupq_n_u32 (__a
, __b
, __imm
);
30296 __extension__
extern __inline uint16x8_t
30297 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30298 __arm_viwdupq_u16 (uint32_t __a
, uint32_t __b
, const int __imm
)
30300 return __arm_viwdupq_n_u16 (__a
, __b
, __imm
);
30303 __extension__
extern __inline uint8x16_t
30304 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30305 __arm_viwdupq_u8 (uint32_t * __a
, uint32_t __b
, const int __imm
)
30307 return __arm_viwdupq_wb_u8 (__a
, __b
, __imm
);
30310 __extension__
extern __inline uint32x4_t
30311 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30312 __arm_viwdupq_u32 (uint32_t * __a
, uint32_t __b
, const int __imm
)
30314 return __arm_viwdupq_wb_u32 (__a
, __b
, __imm
);
30317 __extension__
extern __inline uint16x8_t
30318 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30319 __arm_viwdupq_u16 (uint32_t * __a
, uint32_t __b
, const int __imm
)
30321 return __arm_viwdupq_wb_u16 (__a
, __b
, __imm
);
30324 __extension__
extern __inline
void
30325 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30326 __arm_vstrdq_scatter_base_wb (uint64x2_t
* __addr
, const int __offset
, int64x2_t __value
)
30328 __arm_vstrdq_scatter_base_wb_s64 (__addr
, __offset
, __value
);
30331 __extension__
extern __inline
void
30332 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30333 __arm_vstrdq_scatter_base_wb (uint64x2_t
* __addr
, const int __offset
, uint64x2_t __value
)
30335 __arm_vstrdq_scatter_base_wb_u64 (__addr
, __offset
, __value
);
30338 __extension__
extern __inline
void
30339 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30340 __arm_vstrdq_scatter_base_wb_p (uint64x2_t
* __addr
, const int __offset
, int64x2_t __value
, mve_pred16_t __p
)
30342 __arm_vstrdq_scatter_base_wb_p_s64 (__addr
, __offset
, __value
, __p
);
30345 __extension__
extern __inline
void
30346 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30347 __arm_vstrdq_scatter_base_wb_p (uint64x2_t
* __addr
, const int __offset
, uint64x2_t __value
, mve_pred16_t __p
)
30349 __arm_vstrdq_scatter_base_wb_p_u64 (__addr
, __offset
, __value
, __p
);
30352 __extension__
extern __inline
void
30353 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30354 __arm_vstrwq_scatter_base_wb_p (uint32x4_t
* __addr
, const int __offset
, int32x4_t __value
, mve_pred16_t __p
)
30356 __arm_vstrwq_scatter_base_wb_p_s32 (__addr
, __offset
, __value
, __p
);
30359 __extension__
extern __inline
void
30360 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30361 __arm_vstrwq_scatter_base_wb_p (uint32x4_t
* __addr
, const int __offset
, uint32x4_t __value
, mve_pred16_t __p
)
30363 __arm_vstrwq_scatter_base_wb_p_u32 (__addr
, __offset
, __value
, __p
);
30366 __extension__
extern __inline
void
30367 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30368 __arm_vstrwq_scatter_base_wb (uint32x4_t
* __addr
, const int __offset
, int32x4_t __value
)
30370 __arm_vstrwq_scatter_base_wb_s32 (__addr
, __offset
, __value
);
30373 __extension__
extern __inline
void
30374 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30375 __arm_vstrwq_scatter_base_wb (uint32x4_t
* __addr
, const int __offset
, uint32x4_t __value
)
30377 __arm_vstrwq_scatter_base_wb_u32 (__addr
, __offset
, __value
);
30380 __extension__
extern __inline uint8x16_t
30381 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30382 __arm_vddupq_x_u8 (uint32_t __a
, const int __imm
, mve_pred16_t __p
)
30384 return __arm_vddupq_x_n_u8 (__a
, __imm
, __p
);
30387 __extension__
extern __inline uint16x8_t
30388 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30389 __arm_vddupq_x_u16 (uint32_t __a
, const int __imm
, mve_pred16_t __p
)
30391 return __arm_vddupq_x_n_u16 (__a
, __imm
, __p
);
30394 __extension__
extern __inline uint32x4_t
30395 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30396 __arm_vddupq_x_u32 (uint32_t __a
, const int __imm
, mve_pred16_t __p
)
30398 return __arm_vddupq_x_n_u32 (__a
, __imm
, __p
);
30401 __extension__
extern __inline uint8x16_t
30402 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30403 __arm_vddupq_x_u8 (uint32_t *__a
, const int __imm
, mve_pred16_t __p
)
30405 return __arm_vddupq_x_wb_u8 (__a
, __imm
, __p
);
30408 __extension__
extern __inline uint16x8_t
30409 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30410 __arm_vddupq_x_u16 (uint32_t *__a
, const int __imm
, mve_pred16_t __p
)
30412 return __arm_vddupq_x_wb_u16 (__a
, __imm
, __p
);
30415 __extension__
extern __inline uint32x4_t
30416 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30417 __arm_vddupq_x_u32 (uint32_t *__a
, const int __imm
, mve_pred16_t __p
)
30419 return __arm_vddupq_x_wb_u32 (__a
, __imm
, __p
);
30422 __extension__
extern __inline uint8x16_t
30423 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30424 __arm_vdwdupq_x_u8 (uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30426 return __arm_vdwdupq_x_n_u8 (__a
, __b
, __imm
, __p
);
30429 __extension__
extern __inline uint16x8_t
30430 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30431 __arm_vdwdupq_x_u16 (uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30433 return __arm_vdwdupq_x_n_u16 (__a
, __b
, __imm
, __p
);
30436 __extension__
extern __inline uint32x4_t
30437 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30438 __arm_vdwdupq_x_u32 (uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30440 return __arm_vdwdupq_x_n_u32 (__a
, __b
, __imm
, __p
);
30443 __extension__
extern __inline uint8x16_t
30444 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30445 __arm_vdwdupq_x_u8 (uint32_t *__a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30447 return __arm_vdwdupq_x_wb_u8 (__a
, __b
, __imm
, __p
);
30450 __extension__
extern __inline uint16x8_t
30451 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30452 __arm_vdwdupq_x_u16 (uint32_t *__a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30454 return __arm_vdwdupq_x_wb_u16 (__a
, __b
, __imm
, __p
);
30457 __extension__
extern __inline uint32x4_t
30458 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30459 __arm_vdwdupq_x_u32 (uint32_t *__a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30461 return __arm_vdwdupq_x_wb_u32 (__a
, __b
, __imm
, __p
);
30464 __extension__
extern __inline uint8x16_t
30465 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30466 __arm_vidupq_x_u8 (uint32_t __a
, const int __imm
, mve_pred16_t __p
)
30468 return __arm_vidupq_x_n_u8 (__a
, __imm
, __p
);
30471 __extension__
extern __inline uint16x8_t
30472 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30473 __arm_vidupq_x_u16 (uint32_t __a
, const int __imm
, mve_pred16_t __p
)
30475 return __arm_vidupq_x_n_u16 (__a
, __imm
, __p
);
30478 __extension__
extern __inline uint32x4_t
30479 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30480 __arm_vidupq_x_u32 (uint32_t __a
, const int __imm
, mve_pred16_t __p
)
30482 return __arm_vidupq_x_n_u32 (__a
, __imm
, __p
);
30485 __extension__
extern __inline uint8x16_t
30486 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30487 __arm_vidupq_x_u8 (uint32_t *__a
, const int __imm
, mve_pred16_t __p
)
30489 return __arm_vidupq_x_wb_u8 (__a
, __imm
, __p
);
30492 __extension__
extern __inline uint16x8_t
30493 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30494 __arm_vidupq_x_u16 (uint32_t *__a
, const int __imm
, mve_pred16_t __p
)
30496 return __arm_vidupq_x_wb_u16 (__a
, __imm
, __p
);
30499 __extension__
extern __inline uint32x4_t
30500 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30501 __arm_vidupq_x_u32 (uint32_t *__a
, const int __imm
, mve_pred16_t __p
)
30503 return __arm_vidupq_x_wb_u32 (__a
, __imm
, __p
);
30506 __extension__
extern __inline uint8x16_t
30507 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30508 __arm_viwdupq_x_u8 (uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30510 return __arm_viwdupq_x_n_u8 (__a
, __b
, __imm
, __p
);
30513 __extension__
extern __inline uint16x8_t
30514 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30515 __arm_viwdupq_x_u16 (uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30517 return __arm_viwdupq_x_n_u16 (__a
, __b
, __imm
, __p
);
30520 __extension__
extern __inline uint32x4_t
30521 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30522 __arm_viwdupq_x_u32 (uint32_t __a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30524 return __arm_viwdupq_x_n_u32 (__a
, __b
, __imm
, __p
);
30527 __extension__
extern __inline uint8x16_t
30528 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30529 __arm_viwdupq_x_u8 (uint32_t *__a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30531 return __arm_viwdupq_x_wb_u8 (__a
, __b
, __imm
, __p
);
30534 __extension__
extern __inline uint16x8_t
30535 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30536 __arm_viwdupq_x_u16 (uint32_t *__a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30538 return __arm_viwdupq_x_wb_u16 (__a
, __b
, __imm
, __p
);
30541 __extension__
extern __inline uint32x4_t
30542 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30543 __arm_viwdupq_x_u32 (uint32_t *__a
, uint32_t __b
, const int __imm
, mve_pred16_t __p
)
30545 return __arm_viwdupq_x_wb_u32 (__a
, __b
, __imm
, __p
);
30548 __extension__
extern __inline int8x16_t
30549 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30550 __arm_vminq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
30552 return __arm_vminq_x_s8 (__a
, __b
, __p
);
30555 __extension__
extern __inline int16x8_t
30556 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30557 __arm_vminq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
30559 return __arm_vminq_x_s16 (__a
, __b
, __p
);
30562 __extension__
extern __inline int32x4_t
30563 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30564 __arm_vminq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
30566 return __arm_vminq_x_s32 (__a
, __b
, __p
);
30569 __extension__
extern __inline uint8x16_t
30570 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30571 __arm_vminq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
30573 return __arm_vminq_x_u8 (__a
, __b
, __p
);
30576 __extension__
extern __inline uint16x8_t
30577 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30578 __arm_vminq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
30580 return __arm_vminq_x_u16 (__a
, __b
, __p
);
30583 __extension__
extern __inline uint32x4_t
30584 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30585 __arm_vminq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
30587 return __arm_vminq_x_u32 (__a
, __b
, __p
);
30590 __extension__
extern __inline int8x16_t
30591 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30592 __arm_vmaxq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
30594 return __arm_vmaxq_x_s8 (__a
, __b
, __p
);
30597 __extension__
extern __inline int16x8_t
30598 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30599 __arm_vmaxq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
30601 return __arm_vmaxq_x_s16 (__a
, __b
, __p
);
30604 __extension__
extern __inline int32x4_t
30605 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30606 __arm_vmaxq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
30608 return __arm_vmaxq_x_s32 (__a
, __b
, __p
);
30611 __extension__
extern __inline uint8x16_t
30612 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30613 __arm_vmaxq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
30615 return __arm_vmaxq_x_u8 (__a
, __b
, __p
);
30618 __extension__
extern __inline uint16x8_t
30619 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30620 __arm_vmaxq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
30622 return __arm_vmaxq_x_u16 (__a
, __b
, __p
);
30625 __extension__
extern __inline uint32x4_t
30626 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30627 __arm_vmaxq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
30629 return __arm_vmaxq_x_u32 (__a
, __b
, __p
);
30632 __extension__
extern __inline int8x16_t
30633 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30634 __arm_vabdq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
30636 return __arm_vabdq_x_s8 (__a
, __b
, __p
);
30639 __extension__
extern __inline int16x8_t
30640 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30641 __arm_vabdq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
30643 return __arm_vabdq_x_s16 (__a
, __b
, __p
);
30646 __extension__
extern __inline int32x4_t
30647 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30648 __arm_vabdq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
30650 return __arm_vabdq_x_s32 (__a
, __b
, __p
);
30653 __extension__
extern __inline uint8x16_t
30654 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30655 __arm_vabdq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
30657 return __arm_vabdq_x_u8 (__a
, __b
, __p
);
30660 __extension__
extern __inline uint16x8_t
30661 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30662 __arm_vabdq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
30664 return __arm_vabdq_x_u16 (__a
, __b
, __p
);
30667 __extension__
extern __inline uint32x4_t
30668 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30669 __arm_vabdq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
30671 return __arm_vabdq_x_u32 (__a
, __b
, __p
);
30674 __extension__
extern __inline int8x16_t
30675 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30676 __arm_vabsq_x (int8x16_t __a
, mve_pred16_t __p
)
30678 return __arm_vabsq_x_s8 (__a
, __p
);
30681 __extension__
extern __inline int16x8_t
30682 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30683 __arm_vabsq_x (int16x8_t __a
, mve_pred16_t __p
)
30685 return __arm_vabsq_x_s16 (__a
, __p
);
30688 __extension__
extern __inline int32x4_t
30689 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30690 __arm_vabsq_x (int32x4_t __a
, mve_pred16_t __p
)
30692 return __arm_vabsq_x_s32 (__a
, __p
);
30695 __extension__
extern __inline int8x16_t
30696 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30697 __arm_vaddq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
30699 return __arm_vaddq_x_s8 (__a
, __b
, __p
);
30702 __extension__
extern __inline int16x8_t
30703 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30704 __arm_vaddq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
30706 return __arm_vaddq_x_s16 (__a
, __b
, __p
);
30709 __extension__
extern __inline int32x4_t
30710 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30711 __arm_vaddq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
30713 return __arm_vaddq_x_s32 (__a
, __b
, __p
);
30716 __extension__
extern __inline int8x16_t
30717 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30718 __arm_vaddq_x (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
30720 return __arm_vaddq_x_n_s8 (__a
, __b
, __p
);
30723 __extension__
extern __inline int16x8_t
30724 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30725 __arm_vaddq_x (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
30727 return __arm_vaddq_x_n_s16 (__a
, __b
, __p
);
30730 __extension__
extern __inline int32x4_t
30731 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30732 __arm_vaddq_x (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
30734 return __arm_vaddq_x_n_s32 (__a
, __b
, __p
);
30737 __extension__
extern __inline uint8x16_t
30738 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30739 __arm_vaddq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
30741 return __arm_vaddq_x_u8 (__a
, __b
, __p
);
30744 __extension__
extern __inline uint16x8_t
30745 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30746 __arm_vaddq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
30748 return __arm_vaddq_x_u16 (__a
, __b
, __p
);
30751 __extension__
extern __inline uint32x4_t
30752 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30753 __arm_vaddq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
30755 return __arm_vaddq_x_u32 (__a
, __b
, __p
);
30758 __extension__
extern __inline uint8x16_t
30759 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30760 __arm_vaddq_x (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
30762 return __arm_vaddq_x_n_u8 (__a
, __b
, __p
);
30765 __extension__
extern __inline uint16x8_t
30766 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30767 __arm_vaddq_x (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
30769 return __arm_vaddq_x_n_u16 (__a
, __b
, __p
);
30772 __extension__
extern __inline uint32x4_t
30773 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30774 __arm_vaddq_x (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
30776 return __arm_vaddq_x_n_u32 (__a
, __b
, __p
);
30779 __extension__
extern __inline int8x16_t
30780 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30781 __arm_vclsq_x (int8x16_t __a
, mve_pred16_t __p
)
30783 return __arm_vclsq_x_s8 (__a
, __p
);
30786 __extension__
extern __inline int16x8_t
30787 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30788 __arm_vclsq_x (int16x8_t __a
, mve_pred16_t __p
)
30790 return __arm_vclsq_x_s16 (__a
, __p
);
30793 __extension__
extern __inline int32x4_t
30794 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30795 __arm_vclsq_x (int32x4_t __a
, mve_pred16_t __p
)
30797 return __arm_vclsq_x_s32 (__a
, __p
);
30800 __extension__
extern __inline int8x16_t
30801 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30802 __arm_vclzq_x (int8x16_t __a
, mve_pred16_t __p
)
30804 return __arm_vclzq_x_s8 (__a
, __p
);
30807 __extension__
extern __inline int16x8_t
30808 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30809 __arm_vclzq_x (int16x8_t __a
, mve_pred16_t __p
)
30811 return __arm_vclzq_x_s16 (__a
, __p
);
30814 __extension__
extern __inline int32x4_t
30815 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30816 __arm_vclzq_x (int32x4_t __a
, mve_pred16_t __p
)
30818 return __arm_vclzq_x_s32 (__a
, __p
);
30821 __extension__
extern __inline uint8x16_t
30822 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30823 __arm_vclzq_x (uint8x16_t __a
, mve_pred16_t __p
)
30825 return __arm_vclzq_x_u8 (__a
, __p
);
30828 __extension__
extern __inline uint16x8_t
30829 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30830 __arm_vclzq_x (uint16x8_t __a
, mve_pred16_t __p
)
30832 return __arm_vclzq_x_u16 (__a
, __p
);
30835 __extension__
extern __inline uint32x4_t
30836 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30837 __arm_vclzq_x (uint32x4_t __a
, mve_pred16_t __p
)
30839 return __arm_vclzq_x_u32 (__a
, __p
);
30842 __extension__
extern __inline int8x16_t
30843 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30844 __arm_vnegq_x (int8x16_t __a
, mve_pred16_t __p
)
30846 return __arm_vnegq_x_s8 (__a
, __p
);
30849 __extension__
extern __inline int16x8_t
30850 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30851 __arm_vnegq_x (int16x8_t __a
, mve_pred16_t __p
)
30853 return __arm_vnegq_x_s16 (__a
, __p
);
30856 __extension__
extern __inline int32x4_t
30857 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30858 __arm_vnegq_x (int32x4_t __a
, mve_pred16_t __p
)
30860 return __arm_vnegq_x_s32 (__a
, __p
);
30863 __extension__
extern __inline int8x16_t
30864 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30865 __arm_vmulhq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
30867 return __arm_vmulhq_x_s8 (__a
, __b
, __p
);
30870 __extension__
extern __inline int16x8_t
30871 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30872 __arm_vmulhq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
30874 return __arm_vmulhq_x_s16 (__a
, __b
, __p
);
30877 __extension__
extern __inline int32x4_t
30878 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30879 __arm_vmulhq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
30881 return __arm_vmulhq_x_s32 (__a
, __b
, __p
);
30884 __extension__
extern __inline uint8x16_t
30885 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30886 __arm_vmulhq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
30888 return __arm_vmulhq_x_u8 (__a
, __b
, __p
);
30891 __extension__
extern __inline uint16x8_t
30892 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30893 __arm_vmulhq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
30895 return __arm_vmulhq_x_u16 (__a
, __b
, __p
);
30898 __extension__
extern __inline uint32x4_t
30899 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30900 __arm_vmulhq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
30902 return __arm_vmulhq_x_u32 (__a
, __b
, __p
);
30905 __extension__
extern __inline uint16x8_t
30906 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30907 __arm_vmullbq_poly_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
30909 return __arm_vmullbq_poly_x_p8 (__a
, __b
, __p
);
30912 __extension__
extern __inline uint32x4_t
30913 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30914 __arm_vmullbq_poly_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
30916 return __arm_vmullbq_poly_x_p16 (__a
, __b
, __p
);
30919 __extension__
extern __inline int16x8_t
30920 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30921 __arm_vmullbq_int_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
30923 return __arm_vmullbq_int_x_s8 (__a
, __b
, __p
);
30926 __extension__
extern __inline int32x4_t
30927 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30928 __arm_vmullbq_int_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
30930 return __arm_vmullbq_int_x_s16 (__a
, __b
, __p
);
30933 __extension__
extern __inline int64x2_t
30934 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30935 __arm_vmullbq_int_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
30937 return __arm_vmullbq_int_x_s32 (__a
, __b
, __p
);
30940 __extension__
extern __inline uint16x8_t
30941 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30942 __arm_vmullbq_int_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
30944 return __arm_vmullbq_int_x_u8 (__a
, __b
, __p
);
30947 __extension__
extern __inline uint32x4_t
30948 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30949 __arm_vmullbq_int_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
30951 return __arm_vmullbq_int_x_u16 (__a
, __b
, __p
);
30954 __extension__
extern __inline uint64x2_t
30955 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30956 __arm_vmullbq_int_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
30958 return __arm_vmullbq_int_x_u32 (__a
, __b
, __p
);
30961 __extension__
extern __inline uint16x8_t
30962 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30963 __arm_vmulltq_poly_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
30965 return __arm_vmulltq_poly_x_p8 (__a
, __b
, __p
);
30968 __extension__
extern __inline uint32x4_t
30969 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30970 __arm_vmulltq_poly_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
30972 return __arm_vmulltq_poly_x_p16 (__a
, __b
, __p
);
30975 __extension__
extern __inline int16x8_t
30976 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30977 __arm_vmulltq_int_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
30979 return __arm_vmulltq_int_x_s8 (__a
, __b
, __p
);
30982 __extension__
extern __inline int32x4_t
30983 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30984 __arm_vmulltq_int_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
30986 return __arm_vmulltq_int_x_s16 (__a
, __b
, __p
);
30989 __extension__
extern __inline int64x2_t
30990 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30991 __arm_vmulltq_int_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
30993 return __arm_vmulltq_int_x_s32 (__a
, __b
, __p
);
30996 __extension__
extern __inline uint16x8_t
30997 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
30998 __arm_vmulltq_int_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31000 return __arm_vmulltq_int_x_u8 (__a
, __b
, __p
);
31003 __extension__
extern __inline uint32x4_t
31004 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31005 __arm_vmulltq_int_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31007 return __arm_vmulltq_int_x_u16 (__a
, __b
, __p
);
31010 __extension__
extern __inline uint64x2_t
31011 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31012 __arm_vmulltq_int_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31014 return __arm_vmulltq_int_x_u32 (__a
, __b
, __p
);
31017 __extension__
extern __inline int8x16_t
31018 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31019 __arm_vmulq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31021 return __arm_vmulq_x_s8 (__a
, __b
, __p
);
31024 __extension__
extern __inline int16x8_t
31025 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31026 __arm_vmulq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31028 return __arm_vmulq_x_s16 (__a
, __b
, __p
);
31031 __extension__
extern __inline int32x4_t
31032 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31033 __arm_vmulq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31035 return __arm_vmulq_x_s32 (__a
, __b
, __p
);
31038 __extension__
extern __inline int8x16_t
31039 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31040 __arm_vmulq_x (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
31042 return __arm_vmulq_x_n_s8 (__a
, __b
, __p
);
31045 __extension__
extern __inline int16x8_t
31046 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31047 __arm_vmulq_x (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
31049 return __arm_vmulq_x_n_s16 (__a
, __b
, __p
);
31052 __extension__
extern __inline int32x4_t
31053 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31054 __arm_vmulq_x (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
31056 return __arm_vmulq_x_n_s32 (__a
, __b
, __p
);
31059 __extension__
extern __inline uint8x16_t
31060 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31061 __arm_vmulq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31063 return __arm_vmulq_x_u8 (__a
, __b
, __p
);
31066 __extension__
extern __inline uint16x8_t
31067 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31068 __arm_vmulq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31070 return __arm_vmulq_x_u16 (__a
, __b
, __p
);
31073 __extension__
extern __inline uint32x4_t
31074 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31075 __arm_vmulq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31077 return __arm_vmulq_x_u32 (__a
, __b
, __p
);
31080 __extension__
extern __inline uint8x16_t
31081 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31082 __arm_vmulq_x (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
31084 return __arm_vmulq_x_n_u8 (__a
, __b
, __p
);
31087 __extension__
extern __inline uint16x8_t
31088 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31089 __arm_vmulq_x (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
31091 return __arm_vmulq_x_n_u16 (__a
, __b
, __p
);
31094 __extension__
extern __inline uint32x4_t
31095 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31096 __arm_vmulq_x (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
31098 return __arm_vmulq_x_n_u32 (__a
, __b
, __p
);
31101 __extension__
extern __inline int8x16_t
31102 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31103 __arm_vsubq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31105 return __arm_vsubq_x_s8 (__a
, __b
, __p
);
31108 __extension__
extern __inline int16x8_t
31109 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31110 __arm_vsubq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31112 return __arm_vsubq_x_s16 (__a
, __b
, __p
);
31115 __extension__
extern __inline int32x4_t
31116 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31117 __arm_vsubq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31119 return __arm_vsubq_x_s32 (__a
, __b
, __p
);
31122 __extension__
extern __inline int8x16_t
31123 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31124 __arm_vsubq_x (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
31126 return __arm_vsubq_x_n_s8 (__a
, __b
, __p
);
31129 __extension__
extern __inline int16x8_t
31130 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31131 __arm_vsubq_x (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
31133 return __arm_vsubq_x_n_s16 (__a
, __b
, __p
);
31136 __extension__
extern __inline int32x4_t
31137 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31138 __arm_vsubq_x (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
31140 return __arm_vsubq_x_n_s32 (__a
, __b
, __p
);
31143 __extension__
extern __inline uint8x16_t
31144 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31145 __arm_vsubq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31147 return __arm_vsubq_x_u8 (__a
, __b
, __p
);
31150 __extension__
extern __inline uint16x8_t
31151 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31152 __arm_vsubq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31154 return __arm_vsubq_x_u16 (__a
, __b
, __p
);
31157 __extension__
extern __inline uint32x4_t
31158 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31159 __arm_vsubq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31161 return __arm_vsubq_x_u32 (__a
, __b
, __p
);
31164 __extension__
extern __inline uint8x16_t
31165 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31166 __arm_vsubq_x (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
31168 return __arm_vsubq_x_n_u8 (__a
, __b
, __p
);
31171 __extension__
extern __inline uint16x8_t
31172 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31173 __arm_vsubq_x (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
31175 return __arm_vsubq_x_n_u16 (__a
, __b
, __p
);
31178 __extension__
extern __inline uint32x4_t
31179 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31180 __arm_vsubq_x (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
31182 return __arm_vsubq_x_n_u32 (__a
, __b
, __p
);
31185 __extension__
extern __inline int8x16_t
31186 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31187 __arm_vcaddq_rot90_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31189 return __arm_vcaddq_rot90_x_s8 (__a
, __b
, __p
);
31192 __extension__
extern __inline int16x8_t
31193 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31194 __arm_vcaddq_rot90_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31196 return __arm_vcaddq_rot90_x_s16 (__a
, __b
, __p
);
31199 __extension__
extern __inline int32x4_t
31200 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31201 __arm_vcaddq_rot90_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31203 return __arm_vcaddq_rot90_x_s32 (__a
, __b
, __p
);
31206 __extension__
extern __inline uint8x16_t
31207 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31208 __arm_vcaddq_rot90_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31210 return __arm_vcaddq_rot90_x_u8 (__a
, __b
, __p
);
31213 __extension__
extern __inline uint16x8_t
31214 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31215 __arm_vcaddq_rot90_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31217 return __arm_vcaddq_rot90_x_u16 (__a
, __b
, __p
);
31220 __extension__
extern __inline uint32x4_t
31221 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31222 __arm_vcaddq_rot90_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31224 return __arm_vcaddq_rot90_x_u32 (__a
, __b
, __p
);
31227 __extension__
extern __inline int8x16_t
31228 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31229 __arm_vcaddq_rot270_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31231 return __arm_vcaddq_rot270_x_s8 (__a
, __b
, __p
);
31234 __extension__
extern __inline int16x8_t
31235 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31236 __arm_vcaddq_rot270_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31238 return __arm_vcaddq_rot270_x_s16 (__a
, __b
, __p
);
31241 __extension__
extern __inline int32x4_t
31242 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31243 __arm_vcaddq_rot270_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31245 return __arm_vcaddq_rot270_x_s32 (__a
, __b
, __p
);
31248 __extension__
extern __inline uint8x16_t
31249 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31250 __arm_vcaddq_rot270_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31252 return __arm_vcaddq_rot270_x_u8 (__a
, __b
, __p
);
31255 __extension__
extern __inline uint16x8_t
31256 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31257 __arm_vcaddq_rot270_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31259 return __arm_vcaddq_rot270_x_u16 (__a
, __b
, __p
);
31262 __extension__
extern __inline uint32x4_t
31263 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31264 __arm_vcaddq_rot270_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31266 return __arm_vcaddq_rot270_x_u32 (__a
, __b
, __p
);
31269 __extension__
extern __inline int8x16_t
31270 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31271 __arm_vhaddq_x (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
31273 return __arm_vhaddq_x_n_s8 (__a
, __b
, __p
);
31276 __extension__
extern __inline int16x8_t
31277 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31278 __arm_vhaddq_x (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
31280 return __arm_vhaddq_x_n_s16 (__a
, __b
, __p
);
31283 __extension__
extern __inline int32x4_t
31284 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31285 __arm_vhaddq_x (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
31287 return __arm_vhaddq_x_n_s32 (__a
, __b
, __p
);
31290 __extension__
extern __inline uint8x16_t
31291 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31292 __arm_vhaddq_x (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
31294 return __arm_vhaddq_x_n_u8 (__a
, __b
, __p
);
31297 __extension__
extern __inline uint16x8_t
31298 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31299 __arm_vhaddq_x (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
31301 return __arm_vhaddq_x_n_u16 (__a
, __b
, __p
);
31304 __extension__
extern __inline uint32x4_t
31305 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31306 __arm_vhaddq_x (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
31308 return __arm_vhaddq_x_n_u32 (__a
, __b
, __p
);
31311 __extension__
extern __inline int8x16_t
31312 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31313 __arm_vhaddq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31315 return __arm_vhaddq_x_s8 (__a
, __b
, __p
);
31318 __extension__
extern __inline int16x8_t
31319 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31320 __arm_vhaddq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31322 return __arm_vhaddq_x_s16 (__a
, __b
, __p
);
31325 __extension__
extern __inline int32x4_t
31326 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31327 __arm_vhaddq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31329 return __arm_vhaddq_x_s32 (__a
, __b
, __p
);
31332 __extension__
extern __inline uint8x16_t
31333 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31334 __arm_vhaddq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31336 return __arm_vhaddq_x_u8 (__a
, __b
, __p
);
31339 __extension__
extern __inline uint16x8_t
31340 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31341 __arm_vhaddq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31343 return __arm_vhaddq_x_u16 (__a
, __b
, __p
);
31346 __extension__
extern __inline uint32x4_t
31347 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31348 __arm_vhaddq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31350 return __arm_vhaddq_x_u32 (__a
, __b
, __p
);
31353 __extension__
extern __inline int8x16_t
31354 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31355 __arm_vhcaddq_rot90_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31357 return __arm_vhcaddq_rot90_x_s8 (__a
, __b
, __p
);
31360 __extension__
extern __inline int16x8_t
31361 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31362 __arm_vhcaddq_rot90_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31364 return __arm_vhcaddq_rot90_x_s16 (__a
, __b
, __p
);
31367 __extension__
extern __inline int32x4_t
31368 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31369 __arm_vhcaddq_rot90_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31371 return __arm_vhcaddq_rot90_x_s32 (__a
, __b
, __p
);
31374 __extension__
extern __inline int8x16_t
31375 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31376 __arm_vhcaddq_rot270_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31378 return __arm_vhcaddq_rot270_x_s8 (__a
, __b
, __p
);
31381 __extension__
extern __inline int16x8_t
31382 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31383 __arm_vhcaddq_rot270_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31385 return __arm_vhcaddq_rot270_x_s16 (__a
, __b
, __p
);
31388 __extension__
extern __inline int32x4_t
31389 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31390 __arm_vhcaddq_rot270_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31392 return __arm_vhcaddq_rot270_x_s32 (__a
, __b
, __p
);
31395 __extension__
extern __inline int8x16_t
31396 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31397 __arm_vhsubq_x (int8x16_t __a
, int8_t __b
, mve_pred16_t __p
)
31399 return __arm_vhsubq_x_n_s8 (__a
, __b
, __p
);
31402 __extension__
extern __inline int16x8_t
31403 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31404 __arm_vhsubq_x (int16x8_t __a
, int16_t __b
, mve_pred16_t __p
)
31406 return __arm_vhsubq_x_n_s16 (__a
, __b
, __p
);
31409 __extension__
extern __inline int32x4_t
31410 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31411 __arm_vhsubq_x (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
31413 return __arm_vhsubq_x_n_s32 (__a
, __b
, __p
);
31416 __extension__
extern __inline uint8x16_t
31417 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31418 __arm_vhsubq_x (uint8x16_t __a
, uint8_t __b
, mve_pred16_t __p
)
31420 return __arm_vhsubq_x_n_u8 (__a
, __b
, __p
);
31423 __extension__
extern __inline uint16x8_t
31424 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31425 __arm_vhsubq_x (uint16x8_t __a
, uint16_t __b
, mve_pred16_t __p
)
31427 return __arm_vhsubq_x_n_u16 (__a
, __b
, __p
);
31430 __extension__
extern __inline uint32x4_t
31431 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31432 __arm_vhsubq_x (uint32x4_t __a
, uint32_t __b
, mve_pred16_t __p
)
31434 return __arm_vhsubq_x_n_u32 (__a
, __b
, __p
);
31437 __extension__
extern __inline int8x16_t
31438 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31439 __arm_vhsubq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31441 return __arm_vhsubq_x_s8 (__a
, __b
, __p
);
31444 __extension__
extern __inline int16x8_t
31445 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31446 __arm_vhsubq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31448 return __arm_vhsubq_x_s16 (__a
, __b
, __p
);
31451 __extension__
extern __inline int32x4_t
31452 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31453 __arm_vhsubq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31455 return __arm_vhsubq_x_s32 (__a
, __b
, __p
);
31458 __extension__
extern __inline uint8x16_t
31459 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31460 __arm_vhsubq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31462 return __arm_vhsubq_x_u8 (__a
, __b
, __p
);
31465 __extension__
extern __inline uint16x8_t
31466 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31467 __arm_vhsubq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31469 return __arm_vhsubq_x_u16 (__a
, __b
, __p
);
31472 __extension__
extern __inline uint32x4_t
31473 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31474 __arm_vhsubq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31476 return __arm_vhsubq_x_u32 (__a
, __b
, __p
);
31479 __extension__
extern __inline int8x16_t
31480 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31481 __arm_vrhaddq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31483 return __arm_vrhaddq_x_s8 (__a
, __b
, __p
);
31486 __extension__
extern __inline int16x8_t
31487 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31488 __arm_vrhaddq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31490 return __arm_vrhaddq_x_s16 (__a
, __b
, __p
);
31493 __extension__
extern __inline int32x4_t
31494 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31495 __arm_vrhaddq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31497 return __arm_vrhaddq_x_s32 (__a
, __b
, __p
);
31500 __extension__
extern __inline uint8x16_t
31501 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31502 __arm_vrhaddq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31504 return __arm_vrhaddq_x_u8 (__a
, __b
, __p
);
31507 __extension__
extern __inline uint16x8_t
31508 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31509 __arm_vrhaddq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31511 return __arm_vrhaddq_x_u16 (__a
, __b
, __p
);
31514 __extension__
extern __inline uint32x4_t
31515 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31516 __arm_vrhaddq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31518 return __arm_vrhaddq_x_u32 (__a
, __b
, __p
);
31521 __extension__
extern __inline int8x16_t
31522 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31523 __arm_vrmulhq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31525 return __arm_vrmulhq_x_s8 (__a
, __b
, __p
);
31528 __extension__
extern __inline int16x8_t
31529 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31530 __arm_vrmulhq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31532 return __arm_vrmulhq_x_s16 (__a
, __b
, __p
);
31535 __extension__
extern __inline int32x4_t
31536 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31537 __arm_vrmulhq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31539 return __arm_vrmulhq_x_s32 (__a
, __b
, __p
);
31542 __extension__
extern __inline uint8x16_t
31543 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31544 __arm_vrmulhq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31546 return __arm_vrmulhq_x_u8 (__a
, __b
, __p
);
31549 __extension__
extern __inline uint16x8_t
31550 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31551 __arm_vrmulhq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31553 return __arm_vrmulhq_x_u16 (__a
, __b
, __p
);
31556 __extension__
extern __inline uint32x4_t
31557 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31558 __arm_vrmulhq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31560 return __arm_vrmulhq_x_u32 (__a
, __b
, __p
);
31563 __extension__
extern __inline int8x16_t
31564 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31565 __arm_vandq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31567 return __arm_vandq_x_s8 (__a
, __b
, __p
);
31570 __extension__
extern __inline int16x8_t
31571 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31572 __arm_vandq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31574 return __arm_vandq_x_s16 (__a
, __b
, __p
);
31577 __extension__
extern __inline int32x4_t
31578 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31579 __arm_vandq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31581 return __arm_vandq_x_s32 (__a
, __b
, __p
);
31584 __extension__
extern __inline uint8x16_t
31585 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31586 __arm_vandq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31588 return __arm_vandq_x_u8 (__a
, __b
, __p
);
31591 __extension__
extern __inline uint16x8_t
31592 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31593 __arm_vandq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31595 return __arm_vandq_x_u16 (__a
, __b
, __p
);
31598 __extension__
extern __inline uint32x4_t
31599 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31600 __arm_vandq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31602 return __arm_vandq_x_u32 (__a
, __b
, __p
);
31605 __extension__
extern __inline int8x16_t
31606 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31607 __arm_vbicq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31609 return __arm_vbicq_x_s8 (__a
, __b
, __p
);
31612 __extension__
extern __inline int16x8_t
31613 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31614 __arm_vbicq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31616 return __arm_vbicq_x_s16 (__a
, __b
, __p
);
31619 __extension__
extern __inline int32x4_t
31620 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31621 __arm_vbicq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31623 return __arm_vbicq_x_s32 (__a
, __b
, __p
);
31626 __extension__
extern __inline uint8x16_t
31627 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31628 __arm_vbicq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31630 return __arm_vbicq_x_u8 (__a
, __b
, __p
);
31633 __extension__
extern __inline uint16x8_t
31634 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31635 __arm_vbicq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31637 return __arm_vbicq_x_u16 (__a
, __b
, __p
);
31640 __extension__
extern __inline uint32x4_t
31641 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31642 __arm_vbicq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31644 return __arm_vbicq_x_u32 (__a
, __b
, __p
);
31647 __extension__
extern __inline int8x16_t
31648 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31649 __arm_vbrsrq_x (int8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
31651 return __arm_vbrsrq_x_n_s8 (__a
, __b
, __p
);
31654 __extension__
extern __inline int16x8_t
31655 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31656 __arm_vbrsrq_x (int16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
31658 return __arm_vbrsrq_x_n_s16 (__a
, __b
, __p
);
31661 __extension__
extern __inline int32x4_t
31662 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31663 __arm_vbrsrq_x (int32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
31665 return __arm_vbrsrq_x_n_s32 (__a
, __b
, __p
);
31668 __extension__
extern __inline uint8x16_t
31669 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31670 __arm_vbrsrq_x (uint8x16_t __a
, int32_t __b
, mve_pred16_t __p
)
31672 return __arm_vbrsrq_x_n_u8 (__a
, __b
, __p
);
31675 __extension__
extern __inline uint16x8_t
31676 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31677 __arm_vbrsrq_x (uint16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
31679 return __arm_vbrsrq_x_n_u16 (__a
, __b
, __p
);
31682 __extension__
extern __inline uint32x4_t
31683 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31684 __arm_vbrsrq_x (uint32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
31686 return __arm_vbrsrq_x_n_u32 (__a
, __b
, __p
);
31689 __extension__
extern __inline int8x16_t
31690 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31691 __arm_veorq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31693 return __arm_veorq_x_s8 (__a
, __b
, __p
);
31696 __extension__
extern __inline int16x8_t
31697 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31698 __arm_veorq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31700 return __arm_veorq_x_s16 (__a
, __b
, __p
);
31703 __extension__
extern __inline int32x4_t
31704 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31705 __arm_veorq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31707 return __arm_veorq_x_s32 (__a
, __b
, __p
);
31710 __extension__
extern __inline uint8x16_t
31711 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31712 __arm_veorq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31714 return __arm_veorq_x_u8 (__a
, __b
, __p
);
31717 __extension__
extern __inline uint16x8_t
31718 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31719 __arm_veorq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31721 return __arm_veorq_x_u16 (__a
, __b
, __p
);
31724 __extension__
extern __inline uint32x4_t
31725 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31726 __arm_veorq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31728 return __arm_veorq_x_u32 (__a
, __b
, __p
);
31731 __extension__
extern __inline int16x8_t
31732 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31733 __arm_vmovlbq_x (int8x16_t __a
, mve_pred16_t __p
)
31735 return __arm_vmovlbq_x_s8 (__a
, __p
);
31738 __extension__
extern __inline int32x4_t
31739 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31740 __arm_vmovlbq_x (int16x8_t __a
, mve_pred16_t __p
)
31742 return __arm_vmovlbq_x_s16 (__a
, __p
);
31745 __extension__
extern __inline uint16x8_t
31746 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31747 __arm_vmovlbq_x (uint8x16_t __a
, mve_pred16_t __p
)
31749 return __arm_vmovlbq_x_u8 (__a
, __p
);
31752 __extension__
extern __inline uint32x4_t
31753 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31754 __arm_vmovlbq_x (uint16x8_t __a
, mve_pred16_t __p
)
31756 return __arm_vmovlbq_x_u16 (__a
, __p
);
31759 __extension__
extern __inline int16x8_t
31760 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31761 __arm_vmovltq_x (int8x16_t __a
, mve_pred16_t __p
)
31763 return __arm_vmovltq_x_s8 (__a
, __p
);
31766 __extension__
extern __inline int32x4_t
31767 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31768 __arm_vmovltq_x (int16x8_t __a
, mve_pred16_t __p
)
31770 return __arm_vmovltq_x_s16 (__a
, __p
);
31773 __extension__
extern __inline uint16x8_t
31774 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31775 __arm_vmovltq_x (uint8x16_t __a
, mve_pred16_t __p
)
31777 return __arm_vmovltq_x_u8 (__a
, __p
);
31780 __extension__
extern __inline uint32x4_t
31781 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31782 __arm_vmovltq_x (uint16x8_t __a
, mve_pred16_t __p
)
31784 return __arm_vmovltq_x_u16 (__a
, __p
);
31787 __extension__
extern __inline int8x16_t
31788 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31789 __arm_vmvnq_x (int8x16_t __a
, mve_pred16_t __p
)
31791 return __arm_vmvnq_x_s8 (__a
, __p
);
31794 __extension__
extern __inline int16x8_t
31795 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31796 __arm_vmvnq_x (int16x8_t __a
, mve_pred16_t __p
)
31798 return __arm_vmvnq_x_s16 (__a
, __p
);
31801 __extension__
extern __inline int32x4_t
31802 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31803 __arm_vmvnq_x (int32x4_t __a
, mve_pred16_t __p
)
31805 return __arm_vmvnq_x_s32 (__a
, __p
);
31808 __extension__
extern __inline uint8x16_t
31809 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31810 __arm_vmvnq_x (uint8x16_t __a
, mve_pred16_t __p
)
31812 return __arm_vmvnq_x_u8 (__a
, __p
);
31815 __extension__
extern __inline uint16x8_t
31816 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31817 __arm_vmvnq_x (uint16x8_t __a
, mve_pred16_t __p
)
31819 return __arm_vmvnq_x_u16 (__a
, __p
);
31822 __extension__
extern __inline uint32x4_t
31823 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31824 __arm_vmvnq_x (uint32x4_t __a
, mve_pred16_t __p
)
31826 return __arm_vmvnq_x_u32 (__a
, __p
);
31829 __extension__
extern __inline int8x16_t
31830 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31831 __arm_vornq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31833 return __arm_vornq_x_s8 (__a
, __b
, __p
);
31836 __extension__
extern __inline int16x8_t
31837 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31838 __arm_vornq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31840 return __arm_vornq_x_s16 (__a
, __b
, __p
);
31843 __extension__
extern __inline int32x4_t
31844 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31845 __arm_vornq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31847 return __arm_vornq_x_s32 (__a
, __b
, __p
);
31850 __extension__
extern __inline uint8x16_t
31851 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31852 __arm_vornq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31854 return __arm_vornq_x_u8 (__a
, __b
, __p
);
31857 __extension__
extern __inline uint16x8_t
31858 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31859 __arm_vornq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31861 return __arm_vornq_x_u16 (__a
, __b
, __p
);
31864 __extension__
extern __inline uint32x4_t
31865 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31866 __arm_vornq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31868 return __arm_vornq_x_u32 (__a
, __b
, __p
);
31871 __extension__
extern __inline int8x16_t
31872 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31873 __arm_vorrq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
31875 return __arm_vorrq_x_s8 (__a
, __b
, __p
);
31878 __extension__
extern __inline int16x8_t
31879 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31880 __arm_vorrq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
31882 return __arm_vorrq_x_s16 (__a
, __b
, __p
);
31885 __extension__
extern __inline int32x4_t
31886 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31887 __arm_vorrq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
31889 return __arm_vorrq_x_s32 (__a
, __b
, __p
);
31892 __extension__
extern __inline uint8x16_t
31893 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31894 __arm_vorrq_x (uint8x16_t __a
, uint8x16_t __b
, mve_pred16_t __p
)
31896 return __arm_vorrq_x_u8 (__a
, __b
, __p
);
31899 __extension__
extern __inline uint16x8_t
31900 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31901 __arm_vorrq_x (uint16x8_t __a
, uint16x8_t __b
, mve_pred16_t __p
)
31903 return __arm_vorrq_x_u16 (__a
, __b
, __p
);
31906 __extension__
extern __inline uint32x4_t
31907 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31908 __arm_vorrq_x (uint32x4_t __a
, uint32x4_t __b
, mve_pred16_t __p
)
31910 return __arm_vorrq_x_u32 (__a
, __b
, __p
);
31913 __extension__
extern __inline int8x16_t
31914 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31915 __arm_vrev16q_x (int8x16_t __a
, mve_pred16_t __p
)
31917 return __arm_vrev16q_x_s8 (__a
, __p
);
31920 __extension__
extern __inline uint8x16_t
31921 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31922 __arm_vrev16q_x (uint8x16_t __a
, mve_pred16_t __p
)
31924 return __arm_vrev16q_x_u8 (__a
, __p
);
31927 __extension__
extern __inline int8x16_t
31928 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31929 __arm_vrev32q_x (int8x16_t __a
, mve_pred16_t __p
)
31931 return __arm_vrev32q_x_s8 (__a
, __p
);
31934 __extension__
extern __inline int16x8_t
31935 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31936 __arm_vrev32q_x (int16x8_t __a
, mve_pred16_t __p
)
31938 return __arm_vrev32q_x_s16 (__a
, __p
);
31941 __extension__
extern __inline uint8x16_t
31942 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31943 __arm_vrev32q_x (uint8x16_t __a
, mve_pred16_t __p
)
31945 return __arm_vrev32q_x_u8 (__a
, __p
);
31948 __extension__
extern __inline uint16x8_t
31949 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31950 __arm_vrev32q_x (uint16x8_t __a
, mve_pred16_t __p
)
31952 return __arm_vrev32q_x_u16 (__a
, __p
);
31955 __extension__
extern __inline int8x16_t
31956 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31957 __arm_vrev64q_x (int8x16_t __a
, mve_pred16_t __p
)
31959 return __arm_vrev64q_x_s8 (__a
, __p
);
31962 __extension__
extern __inline int16x8_t
31963 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31964 __arm_vrev64q_x (int16x8_t __a
, mve_pred16_t __p
)
31966 return __arm_vrev64q_x_s16 (__a
, __p
);
31969 __extension__
extern __inline int32x4_t
31970 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31971 __arm_vrev64q_x (int32x4_t __a
, mve_pred16_t __p
)
31973 return __arm_vrev64q_x_s32 (__a
, __p
);
31976 __extension__
extern __inline uint8x16_t
31977 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31978 __arm_vrev64q_x (uint8x16_t __a
, mve_pred16_t __p
)
31980 return __arm_vrev64q_x_u8 (__a
, __p
);
31983 __extension__
extern __inline uint16x8_t
31984 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31985 __arm_vrev64q_x (uint16x8_t __a
, mve_pred16_t __p
)
31987 return __arm_vrev64q_x_u16 (__a
, __p
);
31990 __extension__
extern __inline uint32x4_t
31991 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31992 __arm_vrev64q_x (uint32x4_t __a
, mve_pred16_t __p
)
31994 return __arm_vrev64q_x_u32 (__a
, __p
);
31997 __extension__
extern __inline int8x16_t
31998 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
31999 __arm_vrshlq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
32001 return __arm_vrshlq_x_s8 (__a
, __b
, __p
);
32004 __extension__
extern __inline int16x8_t
32005 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32006 __arm_vrshlq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
32008 return __arm_vrshlq_x_s16 (__a
, __b
, __p
);
32011 __extension__
extern __inline int32x4_t
32012 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32013 __arm_vrshlq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
32015 return __arm_vrshlq_x_s32 (__a
, __b
, __p
);
32018 __extension__
extern __inline uint8x16_t
32019 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32020 __arm_vrshlq_x (uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
32022 return __arm_vrshlq_x_u8 (__a
, __b
, __p
);
32025 __extension__
extern __inline uint16x8_t
32026 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32027 __arm_vrshlq_x (uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
32029 return __arm_vrshlq_x_u16 (__a
, __b
, __p
);
32032 __extension__
extern __inline uint32x4_t
32033 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32034 __arm_vrshlq_x (uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
32036 return __arm_vrshlq_x_u32 (__a
, __b
, __p
);
32039 __extension__
extern __inline int16x8_t
32040 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32041 __arm_vshllbq_x (int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
32043 return __arm_vshllbq_x_n_s8 (__a
, __imm
, __p
);
32046 __extension__
extern __inline int32x4_t
32047 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32048 __arm_vshllbq_x (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
32050 return __arm_vshllbq_x_n_s16 (__a
, __imm
, __p
);
32053 __extension__
extern __inline uint16x8_t
32054 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32055 __arm_vshllbq_x (uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
32057 return __arm_vshllbq_x_n_u8 (__a
, __imm
, __p
);
32060 __extension__
extern __inline uint32x4_t
32061 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32062 __arm_vshllbq_x (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
32064 return __arm_vshllbq_x_n_u16 (__a
, __imm
, __p
);
32067 __extension__
extern __inline int16x8_t
32068 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32069 __arm_vshlltq_x (int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
32071 return __arm_vshlltq_x_n_s8 (__a
, __imm
, __p
);
32074 __extension__
extern __inline int32x4_t
32075 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32076 __arm_vshlltq_x (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
32078 return __arm_vshlltq_x_n_s16 (__a
, __imm
, __p
);
32081 __extension__
extern __inline uint16x8_t
32082 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32083 __arm_vshlltq_x (uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
32085 return __arm_vshlltq_x_n_u8 (__a
, __imm
, __p
);
32088 __extension__
extern __inline uint32x4_t
32089 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32090 __arm_vshlltq_x (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
32092 return __arm_vshlltq_x_n_u16 (__a
, __imm
, __p
);
32095 __extension__
extern __inline int8x16_t
32096 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32097 __arm_vshlq_x (int8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
32099 return __arm_vshlq_x_s8 (__a
, __b
, __p
);
32102 __extension__
extern __inline int16x8_t
32103 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32104 __arm_vshlq_x (int16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
32106 return __arm_vshlq_x_s16 (__a
, __b
, __p
);
32109 __extension__
extern __inline int32x4_t
32110 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32111 __arm_vshlq_x (int32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
32113 return __arm_vshlq_x_s32 (__a
, __b
, __p
);
32116 __extension__
extern __inline uint8x16_t
32117 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32118 __arm_vshlq_x (uint8x16_t __a
, int8x16_t __b
, mve_pred16_t __p
)
32120 return __arm_vshlq_x_u8 (__a
, __b
, __p
);
32123 __extension__
extern __inline uint16x8_t
32124 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32125 __arm_vshlq_x (uint16x8_t __a
, int16x8_t __b
, mve_pred16_t __p
)
32127 return __arm_vshlq_x_u16 (__a
, __b
, __p
);
32130 __extension__
extern __inline uint32x4_t
32131 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32132 __arm_vshlq_x (uint32x4_t __a
, int32x4_t __b
, mve_pred16_t __p
)
32134 return __arm_vshlq_x_u32 (__a
, __b
, __p
);
32137 __extension__
extern __inline int8x16_t
32138 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32139 __arm_vshlq_x_n (int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
32141 return __arm_vshlq_x_n_s8 (__a
, __imm
, __p
);
32144 __extension__
extern __inline int16x8_t
32145 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32146 __arm_vshlq_x_n (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
32148 return __arm_vshlq_x_n_s16 (__a
, __imm
, __p
);
32151 __extension__
extern __inline int32x4_t
32152 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32153 __arm_vshlq_x_n (int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
32155 return __arm_vshlq_x_n_s32 (__a
, __imm
, __p
);
32158 __extension__
extern __inline uint8x16_t
32159 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32160 __arm_vshlq_x_n (uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
32162 return __arm_vshlq_x_n_u8 (__a
, __imm
, __p
);
32165 __extension__
extern __inline uint16x8_t
32166 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32167 __arm_vshlq_x_n (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
32169 return __arm_vshlq_x_n_u16 (__a
, __imm
, __p
);
32172 __extension__
extern __inline uint32x4_t
32173 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32174 __arm_vshlq_x_n (uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
32176 return __arm_vshlq_x_n_u32 (__a
, __imm
, __p
);
32179 __extension__
extern __inline int8x16_t
32180 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32181 __arm_vrshrq_x (int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
32183 return __arm_vrshrq_x_n_s8 (__a
, __imm
, __p
);
32186 __extension__
extern __inline int16x8_t
32187 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32188 __arm_vrshrq_x (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
32190 return __arm_vrshrq_x_n_s16 (__a
, __imm
, __p
);
32193 __extension__
extern __inline int32x4_t
32194 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32195 __arm_vrshrq_x (int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
32197 return __arm_vrshrq_x_n_s32 (__a
, __imm
, __p
);
32200 __extension__
extern __inline uint8x16_t
32201 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32202 __arm_vrshrq_x (uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
32204 return __arm_vrshrq_x_n_u8 (__a
, __imm
, __p
);
32207 __extension__
extern __inline uint16x8_t
32208 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32209 __arm_vrshrq_x (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
32211 return __arm_vrshrq_x_n_u16 (__a
, __imm
, __p
);
32214 __extension__
extern __inline uint32x4_t
32215 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32216 __arm_vrshrq_x (uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
32218 return __arm_vrshrq_x_n_u32 (__a
, __imm
, __p
);
32221 __extension__
extern __inline int8x16_t
32222 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32223 __arm_vshrq_x (int8x16_t __a
, const int __imm
, mve_pred16_t __p
)
32225 return __arm_vshrq_x_n_s8 (__a
, __imm
, __p
);
32228 __extension__
extern __inline int16x8_t
32229 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32230 __arm_vshrq_x (int16x8_t __a
, const int __imm
, mve_pred16_t __p
)
32232 return __arm_vshrq_x_n_s16 (__a
, __imm
, __p
);
32235 __extension__
extern __inline int32x4_t
32236 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32237 __arm_vshrq_x (int32x4_t __a
, const int __imm
, mve_pred16_t __p
)
32239 return __arm_vshrq_x_n_s32 (__a
, __imm
, __p
);
32242 __extension__
extern __inline uint8x16_t
32243 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32244 __arm_vshrq_x (uint8x16_t __a
, const int __imm
, mve_pred16_t __p
)
32246 return __arm_vshrq_x_n_u8 (__a
, __imm
, __p
);
32249 __extension__
extern __inline uint16x8_t
32250 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32251 __arm_vshrq_x (uint16x8_t __a
, const int __imm
, mve_pred16_t __p
)
32253 return __arm_vshrq_x_n_u16 (__a
, __imm
, __p
);
32256 __extension__
extern __inline uint32x4_t
32257 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32258 __arm_vshrq_x (uint32x4_t __a
, const int __imm
, mve_pred16_t __p
)
32260 return __arm_vshrq_x_n_u32 (__a
, __imm
, __p
);
32263 __extension__
extern __inline int32x4_t
32264 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32265 __arm_vadciq (int32x4_t __a
, int32x4_t __b
, unsigned * __carry_out
)
32267 return __arm_vadciq_s32 (__a
, __b
, __carry_out
);
32270 __extension__
extern __inline uint32x4_t
32271 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32272 __arm_vadciq (uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry_out
)
32274 return __arm_vadciq_u32 (__a
, __b
, __carry_out
);
32277 __extension__
extern __inline int32x4_t
32278 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32279 __arm_vadciq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, unsigned * __carry_out
, mve_pred16_t __p
)
32281 return __arm_vadciq_m_s32 (__inactive
, __a
, __b
, __carry_out
, __p
);
32284 __extension__
extern __inline uint32x4_t
32285 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32286 __arm_vadciq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry_out
, mve_pred16_t __p
)
32288 return __arm_vadciq_m_u32 (__inactive
, __a
, __b
, __carry_out
, __p
);
32291 __extension__
extern __inline int32x4_t
32292 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32293 __arm_vadcq (int32x4_t __a
, int32x4_t __b
, unsigned * __carry
)
32295 return __arm_vadcq_s32 (__a
, __b
, __carry
);
32298 __extension__
extern __inline uint32x4_t
32299 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32300 __arm_vadcq (uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry
)
32302 return __arm_vadcq_u32 (__a
, __b
, __carry
);
32305 __extension__
extern __inline int32x4_t
32306 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32307 __arm_vadcq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, unsigned * __carry
, mve_pred16_t __p
)
32309 return __arm_vadcq_m_s32 (__inactive
, __a
, __b
, __carry
, __p
);
32312 __extension__
extern __inline uint32x4_t
32313 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32314 __arm_vadcq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry
, mve_pred16_t __p
)
32316 return __arm_vadcq_m_u32 (__inactive
, __a
, __b
, __carry
, __p
);
32319 __extension__
extern __inline int32x4_t
32320 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32321 __arm_vsbciq (int32x4_t __a
, int32x4_t __b
, unsigned * __carry_out
)
32323 return __arm_vsbciq_s32 (__a
, __b
, __carry_out
);
32326 __extension__
extern __inline uint32x4_t
32327 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32328 __arm_vsbciq (uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry_out
)
32330 return __arm_vsbciq_u32 (__a
, __b
, __carry_out
);
32333 __extension__
extern __inline int32x4_t
32334 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32335 __arm_vsbciq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, unsigned * __carry_out
, mve_pred16_t __p
)
32337 return __arm_vsbciq_m_s32 (__inactive
, __a
, __b
, __carry_out
, __p
);
32340 __extension__
extern __inline uint32x4_t
32341 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32342 __arm_vsbciq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry_out
, mve_pred16_t __p
)
32344 return __arm_vsbciq_m_u32 (__inactive
, __a
, __b
, __carry_out
, __p
);
32347 __extension__
extern __inline int32x4_t
32348 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32349 __arm_vsbcq (int32x4_t __a
, int32x4_t __b
, unsigned * __carry
)
32351 return __arm_vsbcq_s32 (__a
, __b
, __carry
);
32354 __extension__
extern __inline uint32x4_t
32355 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32356 __arm_vsbcq (uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry
)
32358 return __arm_vsbcq_u32 (__a
, __b
, __carry
);
32361 __extension__
extern __inline int32x4_t
32362 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32363 __arm_vsbcq_m (int32x4_t __inactive
, int32x4_t __a
, int32x4_t __b
, unsigned * __carry
, mve_pred16_t __p
)
32365 return __arm_vsbcq_m_s32 (__inactive
, __a
, __b
, __carry
, __p
);
32368 __extension__
extern __inline uint32x4_t
32369 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32370 __arm_vsbcq_m (uint32x4_t __inactive
, uint32x4_t __a
, uint32x4_t __b
, unsigned * __carry
, mve_pred16_t __p
)
32372 return __arm_vsbcq_m_u32 (__inactive
, __a
, __b
, __carry
, __p
);
32375 __extension__
extern __inline
void
32376 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32377 __arm_vst1q_p (uint8_t * __addr
, uint8x16_t __value
, mve_pred16_t __p
)
32379 __arm_vst1q_p_u8 (__addr
, __value
, __p
);
32382 __extension__
extern __inline
void
32383 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32384 __arm_vst1q_p (int8_t * __addr
, int8x16_t __value
, mve_pred16_t __p
)
32386 __arm_vst1q_p_s8 (__addr
, __value
, __p
);
32389 __extension__
extern __inline
void
32390 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32391 __arm_vst2q (int8_t * __addr
, int8x16x2_t __value
)
32393 __arm_vst2q_s8 (__addr
, __value
);
32396 __extension__
extern __inline
void
32397 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32398 __arm_vst2q (uint8_t * __addr
, uint8x16x2_t __value
)
32400 __arm_vst2q_u8 (__addr
, __value
);
32403 __extension__
extern __inline uint8x16_t
32404 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32405 __arm_vld1q_z (uint8_t const *__base
, mve_pred16_t __p
)
32407 return __arm_vld1q_z_u8 (__base
, __p
);
32410 __extension__
extern __inline int8x16_t
32411 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32412 __arm_vld1q_z (int8_t const *__base
, mve_pred16_t __p
)
32414 return __arm_vld1q_z_s8 (__base
, __p
);
32417 __extension__
extern __inline int8x16x2_t
32418 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32419 __arm_vld2q (int8_t const * __addr
)
32421 return __arm_vld2q_s8 (__addr
);
32424 __extension__
extern __inline uint8x16x2_t
32425 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32426 __arm_vld2q (uint8_t const * __addr
)
32428 return __arm_vld2q_u8 (__addr
);
32431 __extension__
extern __inline int8x16x4_t
32432 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32433 __arm_vld4q (int8_t const * __addr
)
32435 return __arm_vld4q_s8 (__addr
);
32438 __extension__
extern __inline uint8x16x4_t
32439 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32440 __arm_vld4q (uint8_t const * __addr
)
32442 return __arm_vld4q_u8 (__addr
);
32445 __extension__
extern __inline
void
32446 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32447 __arm_vst1q_p (uint16_t * __addr
, uint16x8_t __value
, mve_pred16_t __p
)
32449 __arm_vst1q_p_u16 (__addr
, __value
, __p
);
32452 __extension__
extern __inline
void
32453 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32454 __arm_vst1q_p (int16_t * __addr
, int16x8_t __value
, mve_pred16_t __p
)
32456 __arm_vst1q_p_s16 (__addr
, __value
, __p
);
32459 __extension__
extern __inline
void
32460 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32461 __arm_vst2q (int16_t * __addr
, int16x8x2_t __value
)
32463 __arm_vst2q_s16 (__addr
, __value
);
32466 __extension__
extern __inline
void
32467 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32468 __arm_vst2q (uint16_t * __addr
, uint16x8x2_t __value
)
32470 __arm_vst2q_u16 (__addr
, __value
);
32473 __extension__
extern __inline uint16x8_t
32474 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32475 __arm_vld1q_z (uint16_t const *__base
, mve_pred16_t __p
)
32477 return __arm_vld1q_z_u16 (__base
, __p
);
32480 __extension__
extern __inline int16x8_t
32481 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32482 __arm_vld1q_z (int16_t const *__base
, mve_pred16_t __p
)
32484 return __arm_vld1q_z_s16 (__base
, __p
);
32487 __extension__
extern __inline int16x8x2_t
32488 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32489 __arm_vld2q (int16_t const * __addr
)
32491 return __arm_vld2q_s16 (__addr
);
32494 __extension__
extern __inline uint16x8x2_t
32495 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32496 __arm_vld2q (uint16_t const * __addr
)
32498 return __arm_vld2q_u16 (__addr
);
32501 __extension__
extern __inline int16x8x4_t
32502 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32503 __arm_vld4q (int16_t const * __addr
)
32505 return __arm_vld4q_s16 (__addr
);
32508 __extension__
extern __inline uint16x8x4_t
32509 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32510 __arm_vld4q (uint16_t const * __addr
)
32512 return __arm_vld4q_u16 (__addr
);
32515 __extension__
extern __inline
void
32516 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32517 __arm_vst1q_p (uint32_t * __addr
, uint32x4_t __value
, mve_pred16_t __p
)
32519 __arm_vst1q_p_u32 (__addr
, __value
, __p
);
32522 __extension__
extern __inline
void
32523 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32524 __arm_vst1q_p (int32_t * __addr
, int32x4_t __value
, mve_pred16_t __p
)
32526 __arm_vst1q_p_s32 (__addr
, __value
, __p
);
32529 __extension__
extern __inline
void
32530 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32531 __arm_vst2q (int32_t * __addr
, int32x4x2_t __value
)
32533 __arm_vst2q_s32 (__addr
, __value
);
32536 __extension__
extern __inline
void
32537 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32538 __arm_vst2q (uint32_t * __addr
, uint32x4x2_t __value
)
32540 __arm_vst2q_u32 (__addr
, __value
);
32543 __extension__
extern __inline uint32x4_t
32544 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32545 __arm_vld1q_z (uint32_t const *__base
, mve_pred16_t __p
)
32547 return __arm_vld1q_z_u32 (__base
, __p
);
32550 __extension__
extern __inline int32x4_t
32551 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32552 __arm_vld1q_z (int32_t const *__base
, mve_pred16_t __p
)
32554 return __arm_vld1q_z_s32 (__base
, __p
);
32557 __extension__
extern __inline int32x4x2_t
32558 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32559 __arm_vld2q (int32_t const * __addr
)
32561 return __arm_vld2q_s32 (__addr
);
32564 __extension__
extern __inline uint32x4x2_t
32565 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32566 __arm_vld2q (uint32_t const * __addr
)
32568 return __arm_vld2q_u32 (__addr
);
32571 __extension__
extern __inline int32x4x4_t
32572 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32573 __arm_vld4q (int32_t const * __addr
)
32575 return __arm_vld4q_s32 (__addr
);
32578 __extension__
extern __inline uint32x4x4_t
32579 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32580 __arm_vld4q (uint32_t const * __addr
)
32582 return __arm_vld4q_u32 (__addr
);
32585 __extension__
extern __inline int16x8_t
32586 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32587 __arm_vsetq_lane (int16_t __a
, int16x8_t __b
, const int __idx
)
32589 return __arm_vsetq_lane_s16 (__a
, __b
, __idx
);
32592 __extension__
extern __inline int32x4_t
32593 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32594 __arm_vsetq_lane (int32_t __a
, int32x4_t __b
, const int __idx
)
32596 return __arm_vsetq_lane_s32 (__a
, __b
, __idx
);
32599 __extension__
extern __inline int8x16_t
32600 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32601 __arm_vsetq_lane (int8_t __a
, int8x16_t __b
, const int __idx
)
32603 return __arm_vsetq_lane_s8 (__a
, __b
, __idx
);
32606 __extension__
extern __inline int64x2_t
32607 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32608 __arm_vsetq_lane (int64_t __a
, int64x2_t __b
, const int __idx
)
32610 return __arm_vsetq_lane_s64 (__a
, __b
, __idx
);
32613 __extension__
extern __inline uint8x16_t
32614 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32615 __arm_vsetq_lane (uint8_t __a
, uint8x16_t __b
, const int __idx
)
32617 return __arm_vsetq_lane_u8 (__a
, __b
, __idx
);
32620 __extension__
extern __inline uint16x8_t
32621 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32622 __arm_vsetq_lane (uint16_t __a
, uint16x8_t __b
, const int __idx
)
32624 return __arm_vsetq_lane_u16 (__a
, __b
, __idx
);
32627 __extension__
extern __inline uint32x4_t
32628 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32629 __arm_vsetq_lane (uint32_t __a
, uint32x4_t __b
, const int __idx
)
32631 return __arm_vsetq_lane_u32 (__a
, __b
, __idx
);
32634 __extension__
extern __inline uint64x2_t
32635 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32636 __arm_vsetq_lane (uint64_t __a
, uint64x2_t __b
, const int __idx
)
32638 return __arm_vsetq_lane_u64 (__a
, __b
, __idx
);
32641 __extension__
extern __inline
int16_t
32642 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32643 __arm_vgetq_lane (int16x8_t __a
, const int __idx
)
32645 return __arm_vgetq_lane_s16 (__a
, __idx
);
32648 __extension__
extern __inline
int32_t
32649 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32650 __arm_vgetq_lane (int32x4_t __a
, const int __idx
)
32652 return __arm_vgetq_lane_s32 (__a
, __idx
);
32655 __extension__
extern __inline
int8_t
32656 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32657 __arm_vgetq_lane (int8x16_t __a
, const int __idx
)
32659 return __arm_vgetq_lane_s8 (__a
, __idx
);
32662 __extension__
extern __inline
int64_t
32663 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32664 __arm_vgetq_lane (int64x2_t __a
, const int __idx
)
32666 return __arm_vgetq_lane_s64 (__a
, __idx
);
32669 __extension__
extern __inline
uint8_t
32670 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32671 __arm_vgetq_lane (uint8x16_t __a
, const int __idx
)
32673 return __arm_vgetq_lane_u8 (__a
, __idx
);
32676 __extension__
extern __inline
uint16_t
32677 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32678 __arm_vgetq_lane (uint16x8_t __a
, const int __idx
)
32680 return __arm_vgetq_lane_u16 (__a
, __idx
);
32683 __extension__
extern __inline
uint32_t
32684 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32685 __arm_vgetq_lane (uint32x4_t __a
, const int __idx
)
32687 return __arm_vgetq_lane_u32 (__a
, __idx
);
32690 __extension__
extern __inline
uint64_t
32691 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32692 __arm_vgetq_lane (uint64x2_t __a
, const int __idx
)
32694 return __arm_vgetq_lane_u64 (__a
, __idx
);
32697 __extension__
extern __inline int8x16_t
32698 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32699 __arm_vshlcq_m (int8x16_t __a
, uint32_t * __b
, const int __imm
, mve_pred16_t __p
)
32701 return __arm_vshlcq_m_s8 (__a
, __b
, __imm
, __p
);
32704 __extension__
extern __inline uint8x16_t
32705 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32706 __arm_vshlcq_m (uint8x16_t __a
, uint32_t * __b
, const int __imm
, mve_pred16_t __p
)
32708 return __arm_vshlcq_m_u8 (__a
, __b
, __imm
, __p
);
32711 __extension__
extern __inline int16x8_t
32712 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32713 __arm_vshlcq_m (int16x8_t __a
, uint32_t * __b
, const int __imm
, mve_pred16_t __p
)
32715 return __arm_vshlcq_m_s16 (__a
, __b
, __imm
, __p
);
32718 __extension__
extern __inline uint16x8_t
32719 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32720 __arm_vshlcq_m (uint16x8_t __a
, uint32_t * __b
, const int __imm
, mve_pred16_t __p
)
32722 return __arm_vshlcq_m_u16 (__a
, __b
, __imm
, __p
);
32725 __extension__
extern __inline int32x4_t
32726 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32727 __arm_vshlcq_m (int32x4_t __a
, uint32_t * __b
, const int __imm
, mve_pred16_t __p
)
32729 return __arm_vshlcq_m_s32 (__a
, __b
, __imm
, __p
);
32732 __extension__
extern __inline uint32x4_t
32733 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32734 __arm_vshlcq_m (uint32x4_t __a
, uint32_t * __b
, const int __imm
, mve_pred16_t __p
)
32736 return __arm_vshlcq_m_u32 (__a
, __b
, __imm
, __p
);
32739 #if (__ARM_FEATURE_MVE & 2) /* MVE Floating point. */
32741 __extension__
extern __inline
void
32742 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32743 __arm_vst4q (float16_t
* __addr
, float16x8x4_t __value
)
32745 __arm_vst4q_f16 (__addr
, __value
);
32748 __extension__
extern __inline
void
32749 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32750 __arm_vst4q (float32_t
* __addr
, float32x4x4_t __value
)
32752 __arm_vst4q_f32 (__addr
, __value
);
32755 __extension__
extern __inline float16x8_t
32756 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32757 __arm_vrndxq (float16x8_t __a
)
32759 return __arm_vrndxq_f16 (__a
);
32762 __extension__
extern __inline float32x4_t
32763 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32764 __arm_vrndxq (float32x4_t __a
)
32766 return __arm_vrndxq_f32 (__a
);
32769 __extension__
extern __inline float16x8_t
32770 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32771 __arm_vrndq (float16x8_t __a
)
32773 return __arm_vrndq_f16 (__a
);
32776 __extension__
extern __inline float32x4_t
32777 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32778 __arm_vrndq (float32x4_t __a
)
32780 return __arm_vrndq_f32 (__a
);
32783 __extension__
extern __inline float16x8_t
32784 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32785 __arm_vrndpq (float16x8_t __a
)
32787 return __arm_vrndpq_f16 (__a
);
32790 __extension__
extern __inline float32x4_t
32791 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32792 __arm_vrndpq (float32x4_t __a
)
32794 return __arm_vrndpq_f32 (__a
);
32797 __extension__
extern __inline float16x8_t
32798 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32799 __arm_vrndnq (float16x8_t __a
)
32801 return __arm_vrndnq_f16 (__a
);
32804 __extension__
extern __inline float32x4_t
32805 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32806 __arm_vrndnq (float32x4_t __a
)
32808 return __arm_vrndnq_f32 (__a
);
32811 __extension__
extern __inline float16x8_t
32812 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32813 __arm_vrndmq (float16x8_t __a
)
32815 return __arm_vrndmq_f16 (__a
);
32818 __extension__
extern __inline float32x4_t
32819 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32820 __arm_vrndmq (float32x4_t __a
)
32822 return __arm_vrndmq_f32 (__a
);
32825 __extension__
extern __inline float16x8_t
32826 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32827 __arm_vrndaq (float16x8_t __a
)
32829 return __arm_vrndaq_f16 (__a
);
32832 __extension__
extern __inline float32x4_t
32833 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32834 __arm_vrndaq (float32x4_t __a
)
32836 return __arm_vrndaq_f32 (__a
);
32839 __extension__
extern __inline float16x8_t
32840 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32841 __arm_vrev64q (float16x8_t __a
)
32843 return __arm_vrev64q_f16 (__a
);
32846 __extension__
extern __inline float32x4_t
32847 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32848 __arm_vrev64q (float32x4_t __a
)
32850 return __arm_vrev64q_f32 (__a
);
32853 __extension__
extern __inline float16x8_t
32854 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32855 __arm_vnegq (float16x8_t __a
)
32857 return __arm_vnegq_f16 (__a
);
32860 __extension__
extern __inline float32x4_t
32861 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32862 __arm_vnegq (float32x4_t __a
)
32864 return __arm_vnegq_f32 (__a
);
32867 __extension__
extern __inline float16x8_t
32868 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32869 __arm_vdupq_n (float16_t __a
)
32871 return __arm_vdupq_n_f16 (__a
);
32874 __extension__
extern __inline float32x4_t
32875 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32876 __arm_vdupq_n (float32_t __a
)
32878 return __arm_vdupq_n_f32 (__a
);
32881 __extension__
extern __inline float16x8_t
32882 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32883 __arm_vabsq (float16x8_t __a
)
32885 return __arm_vabsq_f16 (__a
);
32888 __extension__
extern __inline float32x4_t
32889 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32890 __arm_vabsq (float32x4_t __a
)
32892 return __arm_vabsq_f32 (__a
);
32895 __extension__
extern __inline float16x8_t
32896 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32897 __arm_vrev32q (float16x8_t __a
)
32899 return __arm_vrev32q_f16 (__a
);
32902 __extension__
extern __inline float32x4_t
32903 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32904 __arm_vcvttq_f32 (float16x8_t __a
)
32906 return __arm_vcvttq_f32_f16 (__a
);
32909 __extension__
extern __inline float32x4_t
32910 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32911 __arm_vcvtbq_f32 (float16x8_t __a
)
32913 return __arm_vcvtbq_f32_f16 (__a
);
32916 __extension__
extern __inline float16x8_t
32917 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32918 __arm_vcvtq (int16x8_t __a
)
32920 return __arm_vcvtq_f16_s16 (__a
);
32923 __extension__
extern __inline float32x4_t
32924 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32925 __arm_vcvtq (int32x4_t __a
)
32927 return __arm_vcvtq_f32_s32 (__a
);
32930 __extension__
extern __inline float16x8_t
32931 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32932 __arm_vcvtq (uint16x8_t __a
)
32934 return __arm_vcvtq_f16_u16 (__a
);
32937 __extension__
extern __inline float32x4_t
32938 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32939 __arm_vcvtq (uint32x4_t __a
)
32941 return __arm_vcvtq_f32_u32 (__a
);
32944 __extension__
extern __inline float16x8_t
32945 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32946 __arm_vsubq (float16x8_t __a
, float16_t __b
)
32948 return __arm_vsubq_n_f16 (__a
, __b
);
32951 __extension__
extern __inline float32x4_t
32952 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32953 __arm_vsubq (float32x4_t __a
, float32_t __b
)
32955 return __arm_vsubq_n_f32 (__a
, __b
);
32958 __extension__
extern __inline float16x8_t
32959 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32960 __arm_vbrsrq (float16x8_t __a
, int32_t __b
)
32962 return __arm_vbrsrq_n_f16 (__a
, __b
);
32965 __extension__
extern __inline float32x4_t
32966 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32967 __arm_vbrsrq (float32x4_t __a
, int32_t __b
)
32969 return __arm_vbrsrq_n_f32 (__a
, __b
);
32972 __extension__
extern __inline float16x8_t
32973 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32974 __arm_vcvtq_n (int16x8_t __a
, const int __imm6
)
32976 return __arm_vcvtq_n_f16_s16 (__a
, __imm6
);
32979 __extension__
extern __inline float32x4_t
32980 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32981 __arm_vcvtq_n (int32x4_t __a
, const int __imm6
)
32983 return __arm_vcvtq_n_f32_s32 (__a
, __imm6
);
32986 __extension__
extern __inline float16x8_t
32987 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32988 __arm_vcvtq_n (uint16x8_t __a
, const int __imm6
)
32990 return __arm_vcvtq_n_f16_u16 (__a
, __imm6
);
32993 __extension__
extern __inline float32x4_t
32994 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
32995 __arm_vcvtq_n (uint32x4_t __a
, const int __imm6
)
32997 return __arm_vcvtq_n_f32_u32 (__a
, __imm6
);
33000 __extension__
extern __inline mve_pred16_t
33001 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33002 __arm_vcmpneq (float16x8_t __a
, float16_t __b
)
33004 return __arm_vcmpneq_n_f16 (__a
, __b
);
33007 __extension__
extern __inline mve_pred16_t
33008 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33009 __arm_vcmpneq (float16x8_t __a
, float16x8_t __b
)
33011 return __arm_vcmpneq_f16 (__a
, __b
);
33014 __extension__
extern __inline mve_pred16_t
33015 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33016 __arm_vcmpltq (float16x8_t __a
, float16_t __b
)
33018 return __arm_vcmpltq_n_f16 (__a
, __b
);
33021 __extension__
extern __inline mve_pred16_t
33022 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33023 __arm_vcmpltq (float16x8_t __a
, float16x8_t __b
)
33025 return __arm_vcmpltq_f16 (__a
, __b
);
33028 __extension__
extern __inline mve_pred16_t
33029 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33030 __arm_vcmpleq (float16x8_t __a
, float16_t __b
)
33032 return __arm_vcmpleq_n_f16 (__a
, __b
);
33035 __extension__
extern __inline mve_pred16_t
33036 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33037 __arm_vcmpleq (float16x8_t __a
, float16x8_t __b
)
33039 return __arm_vcmpleq_f16 (__a
, __b
);
33042 __extension__
extern __inline mve_pred16_t
33043 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33044 __arm_vcmpgtq (float16x8_t __a
, float16_t __b
)
33046 return __arm_vcmpgtq_n_f16 (__a
, __b
);
33049 __extension__
extern __inline mve_pred16_t
33050 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33051 __arm_vcmpgtq (float16x8_t __a
, float16x8_t __b
)
33053 return __arm_vcmpgtq_f16 (__a
, __b
);
33056 __extension__
extern __inline mve_pred16_t
33057 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33058 __arm_vcmpgeq (float16x8_t __a
, float16_t __b
)
33060 return __arm_vcmpgeq_n_f16 (__a
, __b
);
33063 __extension__
extern __inline mve_pred16_t
33064 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33065 __arm_vcmpgeq (float16x8_t __a
, float16x8_t __b
)
33067 return __arm_vcmpgeq_f16 (__a
, __b
);
33070 __extension__
extern __inline mve_pred16_t
33071 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33072 __arm_vcmpeqq (float16x8_t __a
, float16_t __b
)
33074 return __arm_vcmpeqq_n_f16 (__a
, __b
);
33077 __extension__
extern __inline mve_pred16_t
33078 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33079 __arm_vcmpeqq (float16x8_t __a
, float16x8_t __b
)
33081 return __arm_vcmpeqq_f16 (__a
, __b
);
33084 __extension__
extern __inline float16x8_t
33085 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33086 __arm_vsubq (float16x8_t __a
, float16x8_t __b
)
33088 return __arm_vsubq_f16 (__a
, __b
);
33091 __extension__
extern __inline float16x8_t
33092 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33093 __arm_vorrq (float16x8_t __a
, float16x8_t __b
)
33095 return __arm_vorrq_f16 (__a
, __b
);
33098 __extension__
extern __inline float16x8_t
33099 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33100 __arm_vornq (float16x8_t __a
, float16x8_t __b
)
33102 return __arm_vornq_f16 (__a
, __b
);
33105 __extension__
extern __inline float16x8_t
33106 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33107 __arm_vmulq (float16x8_t __a
, float16_t __b
)
33109 return __arm_vmulq_n_f16 (__a
, __b
);
33112 __extension__
extern __inline float16x8_t
33113 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33114 __arm_vmulq (float16x8_t __a
, float16x8_t __b
)
33116 return __arm_vmulq_f16 (__a
, __b
);
33119 __extension__
extern __inline float16_t
33120 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33121 __arm_vminnmvq (float16_t __a
, float16x8_t __b
)
33123 return __arm_vminnmvq_f16 (__a
, __b
);
33126 __extension__
extern __inline float16x8_t
33127 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33128 __arm_vminnmq (float16x8_t __a
, float16x8_t __b
)
33130 return __arm_vminnmq_f16 (__a
, __b
);
33133 __extension__
extern __inline float16_t
33134 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33135 __arm_vminnmavq (float16_t __a
, float16x8_t __b
)
33137 return __arm_vminnmavq_f16 (__a
, __b
);
33140 __extension__
extern __inline float16x8_t
33141 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33142 __arm_vminnmaq (float16x8_t __a
, float16x8_t __b
)
33144 return __arm_vminnmaq_f16 (__a
, __b
);
33147 __extension__
extern __inline float16_t
33148 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33149 __arm_vmaxnmvq (float16_t __a
, float16x8_t __b
)
33151 return __arm_vmaxnmvq_f16 (__a
, __b
);
33154 __extension__
extern __inline float16x8_t
33155 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33156 __arm_vmaxnmq (float16x8_t __a
, float16x8_t __b
)
33158 return __arm_vmaxnmq_f16 (__a
, __b
);
33161 __extension__
extern __inline float16_t
33162 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33163 __arm_vmaxnmavq (float16_t __a
, float16x8_t __b
)
33165 return __arm_vmaxnmavq_f16 (__a
, __b
);
33168 __extension__
extern __inline float16x8_t
33169 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33170 __arm_vmaxnmaq (float16x8_t __a
, float16x8_t __b
)
33172 return __arm_vmaxnmaq_f16 (__a
, __b
);
33175 __extension__
extern __inline float16x8_t
33176 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33177 __arm_veorq (float16x8_t __a
, float16x8_t __b
)
33179 return __arm_veorq_f16 (__a
, __b
);
33182 __extension__
extern __inline float16x8_t
33183 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33184 __arm_vcmulq_rot90 (float16x8_t __a
, float16x8_t __b
)
33186 return __arm_vcmulq_rot90_f16 (__a
, __b
);
33189 __extension__
extern __inline float16x8_t
33190 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33191 __arm_vcmulq_rot270 (float16x8_t __a
, float16x8_t __b
)
33193 return __arm_vcmulq_rot270_f16 (__a
, __b
);
33196 __extension__
extern __inline float16x8_t
33197 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33198 __arm_vcmulq_rot180 (float16x8_t __a
, float16x8_t __b
)
33200 return __arm_vcmulq_rot180_f16 (__a
, __b
);
33203 __extension__
extern __inline float16x8_t
33204 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33205 __arm_vcmulq (float16x8_t __a
, float16x8_t __b
)
33207 return __arm_vcmulq_f16 (__a
, __b
);
33210 __extension__
extern __inline float16x8_t
33211 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33212 __arm_vcaddq_rot90 (float16x8_t __a
, float16x8_t __b
)
33214 return __arm_vcaddq_rot90_f16 (__a
, __b
);
33217 __extension__
extern __inline float16x8_t
33218 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33219 __arm_vcaddq_rot270 (float16x8_t __a
, float16x8_t __b
)
33221 return __arm_vcaddq_rot270_f16 (__a
, __b
);
33224 __extension__
extern __inline float16x8_t
33225 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33226 __arm_vbicq (float16x8_t __a
, float16x8_t __b
)
33228 return __arm_vbicq_f16 (__a
, __b
);
33231 __extension__
extern __inline float16x8_t
33232 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33233 __arm_vandq (float16x8_t __a
, float16x8_t __b
)
33235 return __arm_vandq_f16 (__a
, __b
);
33238 __extension__
extern __inline float16x8_t
33239 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33240 __arm_vaddq (float16x8_t __a
, float16_t __b
)
33242 return __arm_vaddq_n_f16 (__a
, __b
);
33245 __extension__
extern __inline float16x8_t
33246 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33247 __arm_vabdq (float16x8_t __a
, float16x8_t __b
)
33249 return __arm_vabdq_f16 (__a
, __b
);
33252 __extension__
extern __inline mve_pred16_t
33253 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33254 __arm_vcmpneq (float32x4_t __a
, float32_t __b
)
33256 return __arm_vcmpneq_n_f32 (__a
, __b
);
33259 __extension__
extern __inline mve_pred16_t
33260 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33261 __arm_vcmpneq (float32x4_t __a
, float32x4_t __b
)
33263 return __arm_vcmpneq_f32 (__a
, __b
);
33266 __extension__
extern __inline mve_pred16_t
33267 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33268 __arm_vcmpltq (float32x4_t __a
, float32_t __b
)
33270 return __arm_vcmpltq_n_f32 (__a
, __b
);
33273 __extension__
extern __inline mve_pred16_t
33274 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33275 __arm_vcmpltq (float32x4_t __a
, float32x4_t __b
)
33277 return __arm_vcmpltq_f32 (__a
, __b
);
33280 __extension__
extern __inline mve_pred16_t
33281 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33282 __arm_vcmpleq (float32x4_t __a
, float32_t __b
)
33284 return __arm_vcmpleq_n_f32 (__a
, __b
);
33287 __extension__
extern __inline mve_pred16_t
33288 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33289 __arm_vcmpleq (float32x4_t __a
, float32x4_t __b
)
33291 return __arm_vcmpleq_f32 (__a
, __b
);
33294 __extension__
extern __inline mve_pred16_t
33295 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33296 __arm_vcmpgtq (float32x4_t __a
, float32_t __b
)
33298 return __arm_vcmpgtq_n_f32 (__a
, __b
);
33301 __extension__
extern __inline mve_pred16_t
33302 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33303 __arm_vcmpgtq (float32x4_t __a
, float32x4_t __b
)
33305 return __arm_vcmpgtq_f32 (__a
, __b
);
33308 __extension__
extern __inline mve_pred16_t
33309 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33310 __arm_vcmpgeq (float32x4_t __a
, float32_t __b
)
33312 return __arm_vcmpgeq_n_f32 (__a
, __b
);
33315 __extension__
extern __inline mve_pred16_t
33316 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33317 __arm_vcmpgeq (float32x4_t __a
, float32x4_t __b
)
33319 return __arm_vcmpgeq_f32 (__a
, __b
);
33322 __extension__
extern __inline mve_pred16_t
33323 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33324 __arm_vcmpeqq (float32x4_t __a
, float32_t __b
)
33326 return __arm_vcmpeqq_n_f32 (__a
, __b
);
33329 __extension__
extern __inline mve_pred16_t
33330 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33331 __arm_vcmpeqq (float32x4_t __a
, float32x4_t __b
)
33333 return __arm_vcmpeqq_f32 (__a
, __b
);
33336 __extension__
extern __inline float32x4_t
33337 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33338 __arm_vsubq (float32x4_t __a
, float32x4_t __b
)
33340 return __arm_vsubq_f32 (__a
, __b
);
33343 __extension__
extern __inline float32x4_t
33344 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33345 __arm_vorrq (float32x4_t __a
, float32x4_t __b
)
33347 return __arm_vorrq_f32 (__a
, __b
);
33350 __extension__
extern __inline float32x4_t
33351 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33352 __arm_vornq (float32x4_t __a
, float32x4_t __b
)
33354 return __arm_vornq_f32 (__a
, __b
);
33357 __extension__
extern __inline float32x4_t
33358 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33359 __arm_vmulq (float32x4_t __a
, float32_t __b
)
33361 return __arm_vmulq_n_f32 (__a
, __b
);
33364 __extension__
extern __inline float32x4_t
33365 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33366 __arm_vmulq (float32x4_t __a
, float32x4_t __b
)
33368 return __arm_vmulq_f32 (__a
, __b
);
33371 __extension__
extern __inline float32_t
33372 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33373 __arm_vminnmvq (float32_t __a
, float32x4_t __b
)
33375 return __arm_vminnmvq_f32 (__a
, __b
);
33378 __extension__
extern __inline float32x4_t
33379 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33380 __arm_vminnmq (float32x4_t __a
, float32x4_t __b
)
33382 return __arm_vminnmq_f32 (__a
, __b
);
33385 __extension__
extern __inline float32_t
33386 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33387 __arm_vminnmavq (float32_t __a
, float32x4_t __b
)
33389 return __arm_vminnmavq_f32 (__a
, __b
);
33392 __extension__
extern __inline float32x4_t
33393 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33394 __arm_vminnmaq (float32x4_t __a
, float32x4_t __b
)
33396 return __arm_vminnmaq_f32 (__a
, __b
);
33399 __extension__
extern __inline float32_t
33400 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33401 __arm_vmaxnmvq (float32_t __a
, float32x4_t __b
)
33403 return __arm_vmaxnmvq_f32 (__a
, __b
);
33406 __extension__
extern __inline float32x4_t
33407 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33408 __arm_vmaxnmq (float32x4_t __a
, float32x4_t __b
)
33410 return __arm_vmaxnmq_f32 (__a
, __b
);
33413 __extension__
extern __inline float32_t
33414 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33415 __arm_vmaxnmavq (float32_t __a
, float32x4_t __b
)
33417 return __arm_vmaxnmavq_f32 (__a
, __b
);
33420 __extension__
extern __inline float32x4_t
33421 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33422 __arm_vmaxnmaq (float32x4_t __a
, float32x4_t __b
)
33424 return __arm_vmaxnmaq_f32 (__a
, __b
);
33427 __extension__
extern __inline float32x4_t
33428 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33429 __arm_veorq (float32x4_t __a
, float32x4_t __b
)
33431 return __arm_veorq_f32 (__a
, __b
);
33434 __extension__
extern __inline float32x4_t
33435 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33436 __arm_vcmulq_rot90 (float32x4_t __a
, float32x4_t __b
)
33438 return __arm_vcmulq_rot90_f32 (__a
, __b
);
33441 __extension__
extern __inline float32x4_t
33442 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33443 __arm_vcmulq_rot270 (float32x4_t __a
, float32x4_t __b
)
33445 return __arm_vcmulq_rot270_f32 (__a
, __b
);
33448 __extension__
extern __inline float32x4_t
33449 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33450 __arm_vcmulq_rot180 (float32x4_t __a
, float32x4_t __b
)
33452 return __arm_vcmulq_rot180_f32 (__a
, __b
);
33455 __extension__
extern __inline float32x4_t
33456 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33457 __arm_vcmulq (float32x4_t __a
, float32x4_t __b
)
33459 return __arm_vcmulq_f32 (__a
, __b
);
33462 __extension__
extern __inline float32x4_t
33463 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33464 __arm_vcaddq_rot90 (float32x4_t __a
, float32x4_t __b
)
33466 return __arm_vcaddq_rot90_f32 (__a
, __b
);
33469 __extension__
extern __inline float32x4_t
33470 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33471 __arm_vcaddq_rot270 (float32x4_t __a
, float32x4_t __b
)
33473 return __arm_vcaddq_rot270_f32 (__a
, __b
);
33476 __extension__
extern __inline float32x4_t
33477 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33478 __arm_vbicq (float32x4_t __a
, float32x4_t __b
)
33480 return __arm_vbicq_f32 (__a
, __b
);
33483 __extension__
extern __inline float32x4_t
33484 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33485 __arm_vandq (float32x4_t __a
, float32x4_t __b
)
33487 return __arm_vandq_f32 (__a
, __b
);
33490 __extension__
extern __inline float32x4_t
33491 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33492 __arm_vaddq (float32x4_t __a
, float32_t __b
)
33494 return __arm_vaddq_n_f32 (__a
, __b
);
33497 __extension__
extern __inline float32x4_t
33498 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33499 __arm_vabdq (float32x4_t __a
, float32x4_t __b
)
33501 return __arm_vabdq_f32 (__a
, __b
);
33504 __extension__
extern __inline mve_pred16_t
33505 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33506 __arm_vcmpeqq_m (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33508 return __arm_vcmpeqq_m_f16 (__a
, __b
, __p
);
33511 __extension__
extern __inline mve_pred16_t
33512 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33513 __arm_vcmpeqq_m (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
33515 return __arm_vcmpeqq_m_f32 (__a
, __b
, __p
);
33518 __extension__
extern __inline int16x8_t
33519 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33520 __arm_vcvtaq_m (int16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33522 return __arm_vcvtaq_m_s16_f16 (__inactive
, __a
, __p
);
33525 __extension__
extern __inline uint16x8_t
33526 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33527 __arm_vcvtaq_m (uint16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33529 return __arm_vcvtaq_m_u16_f16 (__inactive
, __a
, __p
);
33532 __extension__
extern __inline int32x4_t
33533 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33534 __arm_vcvtaq_m (int32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
33536 return __arm_vcvtaq_m_s32_f32 (__inactive
, __a
, __p
);
33539 __extension__
extern __inline uint32x4_t
33540 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33541 __arm_vcvtaq_m (uint32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
33543 return __arm_vcvtaq_m_u32_f32 (__inactive
, __a
, __p
);
33546 __extension__
extern __inline float16x8_t
33547 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33548 __arm_vcvtq_m (float16x8_t __inactive
, int16x8_t __a
, mve_pred16_t __p
)
33550 return __arm_vcvtq_m_f16_s16 (__inactive
, __a
, __p
);
33553 __extension__
extern __inline float16x8_t
33554 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33555 __arm_vcvtq_m (float16x8_t __inactive
, uint16x8_t __a
, mve_pred16_t __p
)
33557 return __arm_vcvtq_m_f16_u16 (__inactive
, __a
, __p
);
33560 __extension__
extern __inline float32x4_t
33561 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33562 __arm_vcvtq_m (float32x4_t __inactive
, int32x4_t __a
, mve_pred16_t __p
)
33564 return __arm_vcvtq_m_f32_s32 (__inactive
, __a
, __p
);
33567 __extension__
extern __inline float32x4_t
33568 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33569 __arm_vcvtq_m (float32x4_t __inactive
, uint32x4_t __a
, mve_pred16_t __p
)
33571 return __arm_vcvtq_m_f32_u32 (__inactive
, __a
, __p
);
33574 __extension__
extern __inline float16x8_t
33575 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33576 __arm_vcvtbq_m (float16x8_t __a
, float32x4_t __b
, mve_pred16_t __p
)
33578 return __arm_vcvtbq_m_f16_f32 (__a
, __b
, __p
);
33581 __extension__
extern __inline float32x4_t
33582 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33583 __arm_vcvtbq_m (float32x4_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33585 return __arm_vcvtbq_m_f32_f16 (__inactive
, __a
, __p
);
33588 __extension__
extern __inline float16x8_t
33589 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33590 __arm_vcvttq_m (float16x8_t __a
, float32x4_t __b
, mve_pred16_t __p
)
33592 return __arm_vcvttq_m_f16_f32 (__a
, __b
, __p
);
33595 __extension__
extern __inline float32x4_t
33596 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33597 __arm_vcvttq_m (float32x4_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33599 return __arm_vcvttq_m_f32_f16 (__inactive
, __a
, __p
);
33602 __extension__
extern __inline float16x8_t
33603 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33604 __arm_vrev32q_m (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33606 return __arm_vrev32q_m_f16 (__inactive
, __a
, __p
);
33609 __extension__
extern __inline float16x8_t
33610 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33611 __arm_vcmlaq (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
)
33613 return __arm_vcmlaq_f16 (__a
, __b
, __c
);
33616 __extension__
extern __inline float16x8_t
33617 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33618 __arm_vcmlaq_rot180 (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
)
33620 return __arm_vcmlaq_rot180_f16 (__a
, __b
, __c
);
33623 __extension__
extern __inline float16x8_t
33624 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33625 __arm_vcmlaq_rot270 (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
)
33627 return __arm_vcmlaq_rot270_f16 (__a
, __b
, __c
);
33630 __extension__
extern __inline float16x8_t
33631 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33632 __arm_vcmlaq_rot90 (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
)
33634 return __arm_vcmlaq_rot90_f16 (__a
, __b
, __c
);
33637 __extension__
extern __inline float16x8_t
33638 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33639 __arm_vfmaq (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
)
33641 return __arm_vfmaq_f16 (__a
, __b
, __c
);
33644 __extension__
extern __inline float16x8_t
33645 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33646 __arm_vfmaq (float16x8_t __a
, float16x8_t __b
, float16_t __c
)
33648 return __arm_vfmaq_n_f16 (__a
, __b
, __c
);
33651 __extension__
extern __inline float16x8_t
33652 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33653 __arm_vfmasq (float16x8_t __a
, float16x8_t __b
, float16_t __c
)
33655 return __arm_vfmasq_n_f16 (__a
, __b
, __c
);
33658 __extension__
extern __inline float16x8_t
33659 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33660 __arm_vfmsq (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
)
33662 return __arm_vfmsq_f16 (__a
, __b
, __c
);
33665 __extension__
extern __inline float16x8_t
33666 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33667 __arm_vabsq_m (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33669 return __arm_vabsq_m_f16 (__inactive
, __a
, __p
);
33672 __extension__
extern __inline int16x8_t
33673 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33674 __arm_vcvtmq_m (int16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33676 return __arm_vcvtmq_m_s16_f16 (__inactive
, __a
, __p
);
33679 __extension__
extern __inline int16x8_t
33680 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33681 __arm_vcvtnq_m (int16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33683 return __arm_vcvtnq_m_s16_f16 (__inactive
, __a
, __p
);
33686 __extension__
extern __inline int16x8_t
33687 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33688 __arm_vcvtpq_m (int16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33690 return __arm_vcvtpq_m_s16_f16 (__inactive
, __a
, __p
);
33693 __extension__
extern __inline int16x8_t
33694 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33695 __arm_vcvtq_m (int16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33697 return __arm_vcvtq_m_s16_f16 (__inactive
, __a
, __p
);
33700 __extension__
extern __inline float16x8_t
33701 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33702 __arm_vdupq_m (float16x8_t __inactive
, float16_t __a
, mve_pred16_t __p
)
33704 return __arm_vdupq_m_n_f16 (__inactive
, __a
, __p
);
33707 __extension__
extern __inline float16x8_t
33708 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33709 __arm_vmaxnmaq_m (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33711 return __arm_vmaxnmaq_m_f16 (__a
, __b
, __p
);
33714 __extension__
extern __inline float16_t
33715 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33716 __arm_vmaxnmavq_p (float16_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33718 return __arm_vmaxnmavq_p_f16 (__a
, __b
, __p
);
33721 __extension__
extern __inline float16_t
33722 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33723 __arm_vmaxnmvq_p (float16_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33725 return __arm_vmaxnmvq_p_f16 (__a
, __b
, __p
);
33728 __extension__
extern __inline float16x8_t
33729 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33730 __arm_vminnmaq_m (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33732 return __arm_vminnmaq_m_f16 (__a
, __b
, __p
);
33735 __extension__
extern __inline float16_t
33736 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33737 __arm_vminnmavq_p (float16_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33739 return __arm_vminnmavq_p_f16 (__a
, __b
, __p
);
33742 __extension__
extern __inline float16_t
33743 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33744 __arm_vminnmvq_p (float16_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33746 return __arm_vminnmvq_p_f16 (__a
, __b
, __p
);
33749 __extension__
extern __inline float16x8_t
33750 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33751 __arm_vnegq_m (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33753 return __arm_vnegq_m_f16 (__inactive
, __a
, __p
);
33756 __extension__
extern __inline float16x8_t
33757 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33758 __arm_vpselq (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33760 return __arm_vpselq_f16 (__a
, __b
, __p
);
33763 __extension__
extern __inline float16x8_t
33764 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33765 __arm_vrev64q_m (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33767 return __arm_vrev64q_m_f16 (__inactive
, __a
, __p
);
33770 __extension__
extern __inline float16x8_t
33771 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33772 __arm_vrndaq_m (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33774 return __arm_vrndaq_m_f16 (__inactive
, __a
, __p
);
33777 __extension__
extern __inline float16x8_t
33778 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33779 __arm_vrndmq_m (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33781 return __arm_vrndmq_m_f16 (__inactive
, __a
, __p
);
33784 __extension__
extern __inline float16x8_t
33785 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33786 __arm_vrndnq_m (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33788 return __arm_vrndnq_m_f16 (__inactive
, __a
, __p
);
33791 __extension__
extern __inline float16x8_t
33792 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33793 __arm_vrndpq_m (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33795 return __arm_vrndpq_m_f16 (__inactive
, __a
, __p
);
33798 __extension__
extern __inline float16x8_t
33799 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33800 __arm_vrndq_m (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33802 return __arm_vrndq_m_f16 (__inactive
, __a
, __p
);
33805 __extension__
extern __inline float16x8_t
33806 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33807 __arm_vrndxq_m (float16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33809 return __arm_vrndxq_m_f16 (__inactive
, __a
, __p
);
33812 __extension__
extern __inline mve_pred16_t
33813 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33814 __arm_vcmpeqq_m (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
33816 return __arm_vcmpeqq_m_n_f16 (__a
, __b
, __p
);
33819 __extension__
extern __inline mve_pred16_t
33820 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33821 __arm_vcmpgeq_m (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33823 return __arm_vcmpgeq_m_f16 (__a
, __b
, __p
);
33826 __extension__
extern __inline mve_pred16_t
33827 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33828 __arm_vcmpgeq_m (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
33830 return __arm_vcmpgeq_m_n_f16 (__a
, __b
, __p
);
33833 __extension__
extern __inline mve_pred16_t
33834 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33835 __arm_vcmpgtq_m (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33837 return __arm_vcmpgtq_m_f16 (__a
, __b
, __p
);
33840 __extension__
extern __inline mve_pred16_t
33841 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33842 __arm_vcmpgtq_m (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
33844 return __arm_vcmpgtq_m_n_f16 (__a
, __b
, __p
);
33847 __extension__
extern __inline mve_pred16_t
33848 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33849 __arm_vcmpleq_m (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33851 return __arm_vcmpleq_m_f16 (__a
, __b
, __p
);
33854 __extension__
extern __inline mve_pred16_t
33855 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33856 __arm_vcmpleq_m (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
33858 return __arm_vcmpleq_m_n_f16 (__a
, __b
, __p
);
33861 __extension__
extern __inline mve_pred16_t
33862 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33863 __arm_vcmpltq_m (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33865 return __arm_vcmpltq_m_f16 (__a
, __b
, __p
);
33868 __extension__
extern __inline mve_pred16_t
33869 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33870 __arm_vcmpltq_m (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
33872 return __arm_vcmpltq_m_n_f16 (__a
, __b
, __p
);
33875 __extension__
extern __inline mve_pred16_t
33876 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33877 __arm_vcmpneq_m (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
33879 return __arm_vcmpneq_m_f16 (__a
, __b
, __p
);
33882 __extension__
extern __inline mve_pred16_t
33883 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33884 __arm_vcmpneq_m (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
33886 return __arm_vcmpneq_m_n_f16 (__a
, __b
, __p
);
33889 __extension__
extern __inline uint16x8_t
33890 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33891 __arm_vcvtmq_m (uint16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33893 return __arm_vcvtmq_m_u16_f16 (__inactive
, __a
, __p
);
33896 __extension__
extern __inline uint16x8_t
33897 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33898 __arm_vcvtnq_m (uint16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33900 return __arm_vcvtnq_m_u16_f16 (__inactive
, __a
, __p
);
33903 __extension__
extern __inline uint16x8_t
33904 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33905 __arm_vcvtpq_m (uint16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33907 return __arm_vcvtpq_m_u16_f16 (__inactive
, __a
, __p
);
33910 __extension__
extern __inline uint16x8_t
33911 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33912 __arm_vcvtq_m (uint16x8_t __inactive
, float16x8_t __a
, mve_pred16_t __p
)
33914 return __arm_vcvtq_m_u16_f16 (__inactive
, __a
, __p
);
33917 __extension__
extern __inline float32x4_t
33918 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33919 __arm_vcmlaq (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
)
33921 return __arm_vcmlaq_f32 (__a
, __b
, __c
);
33924 __extension__
extern __inline float32x4_t
33925 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33926 __arm_vcmlaq_rot180 (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
)
33928 return __arm_vcmlaq_rot180_f32 (__a
, __b
, __c
);
33931 __extension__
extern __inline float32x4_t
33932 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33933 __arm_vcmlaq_rot270 (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
)
33935 return __arm_vcmlaq_rot270_f32 (__a
, __b
, __c
);
33938 __extension__
extern __inline float32x4_t
33939 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33940 __arm_vcmlaq_rot90 (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
)
33942 return __arm_vcmlaq_rot90_f32 (__a
, __b
, __c
);
33945 __extension__
extern __inline float32x4_t
33946 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33947 __arm_vfmaq (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
)
33949 return __arm_vfmaq_f32 (__a
, __b
, __c
);
33952 __extension__
extern __inline float32x4_t
33953 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33954 __arm_vfmaq (float32x4_t __a
, float32x4_t __b
, float32_t __c
)
33956 return __arm_vfmaq_n_f32 (__a
, __b
, __c
);
33959 __extension__
extern __inline float32x4_t
33960 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33961 __arm_vfmasq (float32x4_t __a
, float32x4_t __b
, float32_t __c
)
33963 return __arm_vfmasq_n_f32 (__a
, __b
, __c
);
33966 __extension__
extern __inline float32x4_t
33967 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33968 __arm_vfmsq (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
)
33970 return __arm_vfmsq_f32 (__a
, __b
, __c
);
33973 __extension__
extern __inline float32x4_t
33974 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33975 __arm_vabsq_m (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
33977 return __arm_vabsq_m_f32 (__inactive
, __a
, __p
);
33980 __extension__
extern __inline int32x4_t
33981 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33982 __arm_vcvtmq_m (int32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
33984 return __arm_vcvtmq_m_s32_f32 (__inactive
, __a
, __p
);
33987 __extension__
extern __inline int32x4_t
33988 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33989 __arm_vcvtnq_m (int32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
33991 return __arm_vcvtnq_m_s32_f32 (__inactive
, __a
, __p
);
33994 __extension__
extern __inline int32x4_t
33995 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
33996 __arm_vcvtpq_m (int32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
33998 return __arm_vcvtpq_m_s32_f32 (__inactive
, __a
, __p
);
34001 __extension__
extern __inline int32x4_t
34002 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34003 __arm_vcvtq_m (int32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34005 return __arm_vcvtq_m_s32_f32 (__inactive
, __a
, __p
);
34008 __extension__
extern __inline float32x4_t
34009 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34010 __arm_vdupq_m (float32x4_t __inactive
, float32_t __a
, mve_pred16_t __p
)
34012 return __arm_vdupq_m_n_f32 (__inactive
, __a
, __p
);
34015 __extension__
extern __inline float32x4_t
34016 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34017 __arm_vmaxnmaq_m (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34019 return __arm_vmaxnmaq_m_f32 (__a
, __b
, __p
);
34022 __extension__
extern __inline float32_t
34023 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34024 __arm_vmaxnmavq_p (float32_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34026 return __arm_vmaxnmavq_p_f32 (__a
, __b
, __p
);
34029 __extension__
extern __inline float32_t
34030 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34031 __arm_vmaxnmvq_p (float32_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34033 return __arm_vmaxnmvq_p_f32 (__a
, __b
, __p
);
34036 __extension__
extern __inline float32x4_t
34037 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34038 __arm_vminnmaq_m (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34040 return __arm_vminnmaq_m_f32 (__a
, __b
, __p
);
34043 __extension__
extern __inline float32_t
34044 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34045 __arm_vminnmavq_p (float32_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34047 return __arm_vminnmavq_p_f32 (__a
, __b
, __p
);
34050 __extension__
extern __inline float32_t
34051 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34052 __arm_vminnmvq_p (float32_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34054 return __arm_vminnmvq_p_f32 (__a
, __b
, __p
);
34057 __extension__
extern __inline float32x4_t
34058 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34059 __arm_vnegq_m (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34061 return __arm_vnegq_m_f32 (__inactive
, __a
, __p
);
34064 __extension__
extern __inline float32x4_t
34065 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34066 __arm_vpselq (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34068 return __arm_vpselq_f32 (__a
, __b
, __p
);
34071 __extension__
extern __inline float32x4_t
34072 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34073 __arm_vrev64q_m (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34075 return __arm_vrev64q_m_f32 (__inactive
, __a
, __p
);
34078 __extension__
extern __inline float32x4_t
34079 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34080 __arm_vrndaq_m (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34082 return __arm_vrndaq_m_f32 (__inactive
, __a
, __p
);
34085 __extension__
extern __inline float32x4_t
34086 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34087 __arm_vrndmq_m (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34089 return __arm_vrndmq_m_f32 (__inactive
, __a
, __p
);
34092 __extension__
extern __inline float32x4_t
34093 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34094 __arm_vrndnq_m (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34096 return __arm_vrndnq_m_f32 (__inactive
, __a
, __p
);
34099 __extension__
extern __inline float32x4_t
34100 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34101 __arm_vrndpq_m (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34103 return __arm_vrndpq_m_f32 (__inactive
, __a
, __p
);
34106 __extension__
extern __inline float32x4_t
34107 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34108 __arm_vrndq_m (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34110 return __arm_vrndq_m_f32 (__inactive
, __a
, __p
);
34113 __extension__
extern __inline float32x4_t
34114 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34115 __arm_vrndxq_m (float32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34117 return __arm_vrndxq_m_f32 (__inactive
, __a
, __p
);
34120 __extension__
extern __inline mve_pred16_t
34121 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34122 __arm_vcmpeqq_m (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
34124 return __arm_vcmpeqq_m_n_f32 (__a
, __b
, __p
);
34127 __extension__
extern __inline mve_pred16_t
34128 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34129 __arm_vcmpgeq_m (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34131 return __arm_vcmpgeq_m_f32 (__a
, __b
, __p
);
34134 __extension__
extern __inline mve_pred16_t
34135 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34136 __arm_vcmpgeq_m (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
34138 return __arm_vcmpgeq_m_n_f32 (__a
, __b
, __p
);
34141 __extension__
extern __inline mve_pred16_t
34142 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34143 __arm_vcmpgtq_m (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34145 return __arm_vcmpgtq_m_f32 (__a
, __b
, __p
);
34148 __extension__
extern __inline mve_pred16_t
34149 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34150 __arm_vcmpgtq_m (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
34152 return __arm_vcmpgtq_m_n_f32 (__a
, __b
, __p
);
34155 __extension__
extern __inline mve_pred16_t
34156 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34157 __arm_vcmpleq_m (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34159 return __arm_vcmpleq_m_f32 (__a
, __b
, __p
);
34162 __extension__
extern __inline mve_pred16_t
34163 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34164 __arm_vcmpleq_m (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
34166 return __arm_vcmpleq_m_n_f32 (__a
, __b
, __p
);
34169 __extension__
extern __inline mve_pred16_t
34170 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34171 __arm_vcmpltq_m (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34173 return __arm_vcmpltq_m_f32 (__a
, __b
, __p
);
34176 __extension__
extern __inline mve_pred16_t
34177 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34178 __arm_vcmpltq_m (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
34180 return __arm_vcmpltq_m_n_f32 (__a
, __b
, __p
);
34183 __extension__
extern __inline mve_pred16_t
34184 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34185 __arm_vcmpneq_m (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34187 return __arm_vcmpneq_m_f32 (__a
, __b
, __p
);
34190 __extension__
extern __inline mve_pred16_t
34191 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34192 __arm_vcmpneq_m (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
34194 return __arm_vcmpneq_m_n_f32 (__a
, __b
, __p
);
34197 __extension__
extern __inline uint32x4_t
34198 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34199 __arm_vcvtmq_m (uint32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34201 return __arm_vcvtmq_m_u32_f32 (__inactive
, __a
, __p
);
34204 __extension__
extern __inline uint32x4_t
34205 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34206 __arm_vcvtnq_m (uint32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34208 return __arm_vcvtnq_m_u32_f32 (__inactive
, __a
, __p
);
34211 __extension__
extern __inline uint32x4_t
34212 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34213 __arm_vcvtpq_m (uint32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34215 return __arm_vcvtpq_m_u32_f32 (__inactive
, __a
, __p
);
34218 __extension__
extern __inline uint32x4_t
34219 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34220 __arm_vcvtq_m (uint32x4_t __inactive
, float32x4_t __a
, mve_pred16_t __p
)
34222 return __arm_vcvtq_m_u32_f32 (__inactive
, __a
, __p
);
34225 __extension__
extern __inline float16x8_t
34226 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34227 __arm_vcvtq_m_n (float16x8_t __inactive
, uint16x8_t __a
, const int __imm6
, mve_pred16_t __p
)
34229 return __arm_vcvtq_m_n_f16_u16 (__inactive
, __a
, __imm6
, __p
);
34232 __extension__
extern __inline float16x8_t
34233 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34234 __arm_vcvtq_m_n (float16x8_t __inactive
, int16x8_t __a
, const int __imm6
, mve_pred16_t __p
)
34236 return __arm_vcvtq_m_n_f16_s16 (__inactive
, __a
, __imm6
, __p
);
34239 __extension__
extern __inline float32x4_t
34240 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34241 __arm_vcvtq_m_n (float32x4_t __inactive
, uint32x4_t __a
, const int __imm6
, mve_pred16_t __p
)
34243 return __arm_vcvtq_m_n_f32_u32 (__inactive
, __a
, __imm6
, __p
);
34246 __extension__
extern __inline float32x4_t
34247 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34248 __arm_vcvtq_m_n (float32x4_t __inactive
, int32x4_t __a
, const int __imm6
, mve_pred16_t __p
)
34250 return __arm_vcvtq_m_n_f32_s32 (__inactive
, __a
, __imm6
, __p
);
34253 __extension__
extern __inline float32x4_t
34254 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34255 __arm_vabdq_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34257 return __arm_vabdq_m_f32 (__inactive
, __a
, __b
, __p
);
34260 __extension__
extern __inline float16x8_t
34261 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34262 __arm_vabdq_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34264 return __arm_vabdq_m_f16 (__inactive
, __a
, __b
, __p
);
34267 __extension__
extern __inline float32x4_t
34268 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34269 __arm_vaddq_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34271 return __arm_vaddq_m_f32 (__inactive
, __a
, __b
, __p
);
34274 __extension__
extern __inline float16x8_t
34275 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34276 __arm_vaddq_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34278 return __arm_vaddq_m_f16 (__inactive
, __a
, __b
, __p
);
34281 __extension__
extern __inline float32x4_t
34282 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34283 __arm_vaddq_m (float32x4_t __inactive
, float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
34285 return __arm_vaddq_m_n_f32 (__inactive
, __a
, __b
, __p
);
34288 __extension__
extern __inline float16x8_t
34289 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34290 __arm_vaddq_m (float16x8_t __inactive
, float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
34292 return __arm_vaddq_m_n_f16 (__inactive
, __a
, __b
, __p
);
34295 __extension__
extern __inline float32x4_t
34296 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34297 __arm_vandq_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34299 return __arm_vandq_m_f32 (__inactive
, __a
, __b
, __p
);
34302 __extension__
extern __inline float16x8_t
34303 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34304 __arm_vandq_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34306 return __arm_vandq_m_f16 (__inactive
, __a
, __b
, __p
);
34309 __extension__
extern __inline float32x4_t
34310 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34311 __arm_vbicq_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34313 return __arm_vbicq_m_f32 (__inactive
, __a
, __b
, __p
);
34316 __extension__
extern __inline float16x8_t
34317 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34318 __arm_vbicq_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34320 return __arm_vbicq_m_f16 (__inactive
, __a
, __b
, __p
);
34323 __extension__
extern __inline float32x4_t
34324 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34325 __arm_vbrsrq_m (float32x4_t __inactive
, float32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
34327 return __arm_vbrsrq_m_n_f32 (__inactive
, __a
, __b
, __p
);
34330 __extension__
extern __inline float16x8_t
34331 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34332 __arm_vbrsrq_m (float16x8_t __inactive
, float16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
34334 return __arm_vbrsrq_m_n_f16 (__inactive
, __a
, __b
, __p
);
34337 __extension__
extern __inline float32x4_t
34338 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34339 __arm_vcaddq_rot270_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34341 return __arm_vcaddq_rot270_m_f32 (__inactive
, __a
, __b
, __p
);
34344 __extension__
extern __inline float16x8_t
34345 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34346 __arm_vcaddq_rot270_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34348 return __arm_vcaddq_rot270_m_f16 (__inactive
, __a
, __b
, __p
);
34351 __extension__
extern __inline float32x4_t
34352 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34353 __arm_vcaddq_rot90_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34355 return __arm_vcaddq_rot90_m_f32 (__inactive
, __a
, __b
, __p
);
34358 __extension__
extern __inline float16x8_t
34359 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34360 __arm_vcaddq_rot90_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34362 return __arm_vcaddq_rot90_m_f16 (__inactive
, __a
, __b
, __p
);
34365 __extension__
extern __inline float32x4_t
34366 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34367 __arm_vcmlaq_m (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
, mve_pred16_t __p
)
34369 return __arm_vcmlaq_m_f32 (__a
, __b
, __c
, __p
);
34372 __extension__
extern __inline float16x8_t
34373 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34374 __arm_vcmlaq_m (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
, mve_pred16_t __p
)
34376 return __arm_vcmlaq_m_f16 (__a
, __b
, __c
, __p
);
34379 __extension__
extern __inline float32x4_t
34380 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34381 __arm_vcmlaq_rot180_m (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
, mve_pred16_t __p
)
34383 return __arm_vcmlaq_rot180_m_f32 (__a
, __b
, __c
, __p
);
34386 __extension__
extern __inline float16x8_t
34387 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34388 __arm_vcmlaq_rot180_m (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
, mve_pred16_t __p
)
34390 return __arm_vcmlaq_rot180_m_f16 (__a
, __b
, __c
, __p
);
34393 __extension__
extern __inline float32x4_t
34394 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34395 __arm_vcmlaq_rot270_m (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
, mve_pred16_t __p
)
34397 return __arm_vcmlaq_rot270_m_f32 (__a
, __b
, __c
, __p
);
34400 __extension__
extern __inline float16x8_t
34401 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34402 __arm_vcmlaq_rot270_m (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
, mve_pred16_t __p
)
34404 return __arm_vcmlaq_rot270_m_f16 (__a
, __b
, __c
, __p
);
34407 __extension__
extern __inline float32x4_t
34408 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34409 __arm_vcmlaq_rot90_m (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
, mve_pred16_t __p
)
34411 return __arm_vcmlaq_rot90_m_f32 (__a
, __b
, __c
, __p
);
34414 __extension__
extern __inline float16x8_t
34415 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34416 __arm_vcmlaq_rot90_m (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
, mve_pred16_t __p
)
34418 return __arm_vcmlaq_rot90_m_f16 (__a
, __b
, __c
, __p
);
34421 __extension__
extern __inline float32x4_t
34422 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34423 __arm_vcmulq_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34425 return __arm_vcmulq_m_f32 (__inactive
, __a
, __b
, __p
);
34428 __extension__
extern __inline float16x8_t
34429 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34430 __arm_vcmulq_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34432 return __arm_vcmulq_m_f16 (__inactive
, __a
, __b
, __p
);
34435 __extension__
extern __inline float32x4_t
34436 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34437 __arm_vcmulq_rot180_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34439 return __arm_vcmulq_rot180_m_f32 (__inactive
, __a
, __b
, __p
);
34442 __extension__
extern __inline float16x8_t
34443 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34444 __arm_vcmulq_rot180_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34446 return __arm_vcmulq_rot180_m_f16 (__inactive
, __a
, __b
, __p
);
34449 __extension__
extern __inline float32x4_t
34450 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34451 __arm_vcmulq_rot270_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34453 return __arm_vcmulq_rot270_m_f32 (__inactive
, __a
, __b
, __p
);
34456 __extension__
extern __inline float16x8_t
34457 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34458 __arm_vcmulq_rot270_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34460 return __arm_vcmulq_rot270_m_f16 (__inactive
, __a
, __b
, __p
);
34463 __extension__
extern __inline float32x4_t
34464 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34465 __arm_vcmulq_rot90_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34467 return __arm_vcmulq_rot90_m_f32 (__inactive
, __a
, __b
, __p
);
34470 __extension__
extern __inline float16x8_t
34471 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34472 __arm_vcmulq_rot90_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34474 return __arm_vcmulq_rot90_m_f16 (__inactive
, __a
, __b
, __p
);
34477 __extension__
extern __inline int32x4_t
34478 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34479 __arm_vcvtq_m_n (int32x4_t __inactive
, float32x4_t __a
, const int __imm6
, mve_pred16_t __p
)
34481 return __arm_vcvtq_m_n_s32_f32 (__inactive
, __a
, __imm6
, __p
);
34484 __extension__
extern __inline int16x8_t
34485 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34486 __arm_vcvtq_m_n (int16x8_t __inactive
, float16x8_t __a
, const int __imm6
, mve_pred16_t __p
)
34488 return __arm_vcvtq_m_n_s16_f16 (__inactive
, __a
, __imm6
, __p
);
34491 __extension__
extern __inline uint32x4_t
34492 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34493 __arm_vcvtq_m_n (uint32x4_t __inactive
, float32x4_t __a
, const int __imm6
, mve_pred16_t __p
)
34495 return __arm_vcvtq_m_n_u32_f32 (__inactive
, __a
, __imm6
, __p
);
34498 __extension__
extern __inline uint16x8_t
34499 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34500 __arm_vcvtq_m_n (uint16x8_t __inactive
, float16x8_t __a
, const int __imm6
, mve_pred16_t __p
)
34502 return __arm_vcvtq_m_n_u16_f16 (__inactive
, __a
, __imm6
, __p
);
34505 __extension__
extern __inline float32x4_t
34506 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34507 __arm_veorq_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34509 return __arm_veorq_m_f32 (__inactive
, __a
, __b
, __p
);
34512 __extension__
extern __inline float16x8_t
34513 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34514 __arm_veorq_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34516 return __arm_veorq_m_f16 (__inactive
, __a
, __b
, __p
);
34519 __extension__
extern __inline float32x4_t
34520 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34521 __arm_vfmaq_m (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
, mve_pred16_t __p
)
34523 return __arm_vfmaq_m_f32 (__a
, __b
, __c
, __p
);
34526 __extension__
extern __inline float16x8_t
34527 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34528 __arm_vfmaq_m (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
, mve_pred16_t __p
)
34530 return __arm_vfmaq_m_f16 (__a
, __b
, __c
, __p
);
34533 __extension__
extern __inline float32x4_t
34534 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34535 __arm_vfmaq_m (float32x4_t __a
, float32x4_t __b
, float32_t __c
, mve_pred16_t __p
)
34537 return __arm_vfmaq_m_n_f32 (__a
, __b
, __c
, __p
);
34540 __extension__
extern __inline float16x8_t
34541 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34542 __arm_vfmaq_m (float16x8_t __a
, float16x8_t __b
, float16_t __c
, mve_pred16_t __p
)
34544 return __arm_vfmaq_m_n_f16 (__a
, __b
, __c
, __p
);
34547 __extension__
extern __inline float32x4_t
34548 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34549 __arm_vfmasq_m (float32x4_t __a
, float32x4_t __b
, float32_t __c
, mve_pred16_t __p
)
34551 return __arm_vfmasq_m_n_f32 (__a
, __b
, __c
, __p
);
34554 __extension__
extern __inline float16x8_t
34555 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34556 __arm_vfmasq_m (float16x8_t __a
, float16x8_t __b
, float16_t __c
, mve_pred16_t __p
)
34558 return __arm_vfmasq_m_n_f16 (__a
, __b
, __c
, __p
);
34561 __extension__
extern __inline float32x4_t
34562 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34563 __arm_vfmsq_m (float32x4_t __a
, float32x4_t __b
, float32x4_t __c
, mve_pred16_t __p
)
34565 return __arm_vfmsq_m_f32 (__a
, __b
, __c
, __p
);
34568 __extension__
extern __inline float16x8_t
34569 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34570 __arm_vfmsq_m (float16x8_t __a
, float16x8_t __b
, float16x8_t __c
, mve_pred16_t __p
)
34572 return __arm_vfmsq_m_f16 (__a
, __b
, __c
, __p
);
34575 __extension__
extern __inline float32x4_t
34576 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34577 __arm_vmaxnmq_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34579 return __arm_vmaxnmq_m_f32 (__inactive
, __a
, __b
, __p
);
34582 __extension__
extern __inline float16x8_t
34583 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34584 __arm_vmaxnmq_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34586 return __arm_vmaxnmq_m_f16 (__inactive
, __a
, __b
, __p
);
34589 __extension__
extern __inline float32x4_t
34590 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34591 __arm_vminnmq_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34593 return __arm_vminnmq_m_f32 (__inactive
, __a
, __b
, __p
);
34596 __extension__
extern __inline float16x8_t
34597 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34598 __arm_vminnmq_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34600 return __arm_vminnmq_m_f16 (__inactive
, __a
, __b
, __p
);
34603 __extension__
extern __inline float32x4_t
34604 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34605 __arm_vmulq_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34607 return __arm_vmulq_m_f32 (__inactive
, __a
, __b
, __p
);
34610 __extension__
extern __inline float16x8_t
34611 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34612 __arm_vmulq_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34614 return __arm_vmulq_m_f16 (__inactive
, __a
, __b
, __p
);
34617 __extension__
extern __inline float32x4_t
34618 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34619 __arm_vmulq_m (float32x4_t __inactive
, float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
34621 return __arm_vmulq_m_n_f32 (__inactive
, __a
, __b
, __p
);
34624 __extension__
extern __inline float16x8_t
34625 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34626 __arm_vmulq_m (float16x8_t __inactive
, float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
34628 return __arm_vmulq_m_n_f16 (__inactive
, __a
, __b
, __p
);
34631 __extension__
extern __inline float32x4_t
34632 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34633 __arm_vornq_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34635 return __arm_vornq_m_f32 (__inactive
, __a
, __b
, __p
);
34638 __extension__
extern __inline float16x8_t
34639 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34640 __arm_vornq_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34642 return __arm_vornq_m_f16 (__inactive
, __a
, __b
, __p
);
34645 __extension__
extern __inline float32x4_t
34646 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34647 __arm_vorrq_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34649 return __arm_vorrq_m_f32 (__inactive
, __a
, __b
, __p
);
34652 __extension__
extern __inline float16x8_t
34653 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34654 __arm_vorrq_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34656 return __arm_vorrq_m_f16 (__inactive
, __a
, __b
, __p
);
34659 __extension__
extern __inline float32x4_t
34660 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34661 __arm_vsubq_m (float32x4_t __inactive
, float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34663 return __arm_vsubq_m_f32 (__inactive
, __a
, __b
, __p
);
34666 __extension__
extern __inline float16x8_t
34667 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34668 __arm_vsubq_m (float16x8_t __inactive
, float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34670 return __arm_vsubq_m_f16 (__inactive
, __a
, __b
, __p
);
34673 __extension__
extern __inline float32x4_t
34674 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34675 __arm_vsubq_m (float32x4_t __inactive
, float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
34677 return __arm_vsubq_m_n_f32 (__inactive
, __a
, __b
, __p
);
34680 __extension__
extern __inline float16x8_t
34681 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34682 __arm_vsubq_m (float16x8_t __inactive
, float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
34684 return __arm_vsubq_m_n_f16 (__inactive
, __a
, __b
, __p
);
34687 __extension__
extern __inline float32x4_t
34688 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34689 __arm_vld1q (float32_t
const * __base
)
34691 return __arm_vld1q_f32 (__base
);
34694 __extension__
extern __inline float16x8_t
34695 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34696 __arm_vld1q (float16_t
const * __base
)
34698 return __arm_vld1q_f16 (__base
);
34701 __extension__
extern __inline float16x8_t
34702 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34703 __arm_vldrhq_gather_offset (float16_t
const * __base
, uint16x8_t __offset
)
34705 return __arm_vldrhq_gather_offset_f16 (__base
, __offset
);
34708 __extension__
extern __inline float16x8_t
34709 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34710 __arm_vldrhq_gather_offset_z (float16_t
const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
34712 return __arm_vldrhq_gather_offset_z_f16 (__base
, __offset
, __p
);
34715 __extension__
extern __inline float16x8_t
34716 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34717 __arm_vldrhq_gather_shifted_offset (float16_t
const * __base
, uint16x8_t __offset
)
34719 return __arm_vldrhq_gather_shifted_offset_f16 (__base
, __offset
);
34722 __extension__
extern __inline float16x8_t
34723 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34724 __arm_vldrhq_gather_shifted_offset_z (float16_t
const * __base
, uint16x8_t __offset
, mve_pred16_t __p
)
34726 return __arm_vldrhq_gather_shifted_offset_z_f16 (__base
, __offset
, __p
);
34729 __extension__
extern __inline float32x4_t
34730 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34731 __arm_vldrwq_gather_offset (float32_t
const * __base
, uint32x4_t __offset
)
34733 return __arm_vldrwq_gather_offset_f32 (__base
, __offset
);
34736 __extension__
extern __inline float32x4_t
34737 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34738 __arm_vldrwq_gather_offset_z (float32_t
const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
34740 return __arm_vldrwq_gather_offset_z_f32 (__base
, __offset
, __p
);
34743 __extension__
extern __inline float32x4_t
34744 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34745 __arm_vldrwq_gather_shifted_offset (float32_t
const * __base
, uint32x4_t __offset
)
34747 return __arm_vldrwq_gather_shifted_offset_f32 (__base
, __offset
);
34750 __extension__
extern __inline float32x4_t
34751 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34752 __arm_vldrwq_gather_shifted_offset_z (float32_t
const * __base
, uint32x4_t __offset
, mve_pred16_t __p
)
34754 return __arm_vldrwq_gather_shifted_offset_z_f32 (__base
, __offset
, __p
);
34757 __extension__
extern __inline
void
34758 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34759 __arm_vstrwq_p (float32_t
* __addr
, float32x4_t __value
, mve_pred16_t __p
)
34761 __arm_vstrwq_p_f32 (__addr
, __value
, __p
);
34764 __extension__
extern __inline
void
34765 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34766 __arm_vstrwq (float32_t
* __addr
, float32x4_t __value
)
34768 __arm_vstrwq_f32 (__addr
, __value
);
34771 __extension__
extern __inline
void
34772 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34773 __arm_vst1q (float32_t
* __addr
, float32x4_t __value
)
34775 __arm_vst1q_f32 (__addr
, __value
);
34778 __extension__
extern __inline
void
34779 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34780 __arm_vst1q (float16_t
* __addr
, float16x8_t __value
)
34782 __arm_vst1q_f16 (__addr
, __value
);
34785 __extension__
extern __inline
void
34786 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34787 __arm_vstrhq (float16_t
* __addr
, float16x8_t __value
)
34789 __arm_vstrhq_f16 (__addr
, __value
);
34792 __extension__
extern __inline
void
34793 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34794 __arm_vstrhq_p (float16_t
* __addr
, float16x8_t __value
, mve_pred16_t __p
)
34796 __arm_vstrhq_p_f16 (__addr
, __value
, __p
);
34799 __extension__
extern __inline
void
34800 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34801 __arm_vstrhq_scatter_offset (float16_t
* __base
, uint16x8_t __offset
, float16x8_t __value
)
34803 __arm_vstrhq_scatter_offset_f16 (__base
, __offset
, __value
);
34806 __extension__
extern __inline
void
34807 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34808 __arm_vstrhq_scatter_offset_p (float16_t
* __base
, uint16x8_t __offset
, float16x8_t __value
, mve_pred16_t __p
)
34810 __arm_vstrhq_scatter_offset_p_f16 (__base
, __offset
, __value
, __p
);
34813 __extension__
extern __inline
void
34814 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34815 __arm_vstrhq_scatter_shifted_offset (float16_t
* __base
, uint16x8_t __offset
, float16x8_t __value
)
34817 __arm_vstrhq_scatter_shifted_offset_f16 (__base
, __offset
, __value
);
34820 __extension__
extern __inline
void
34821 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34822 __arm_vstrhq_scatter_shifted_offset_p (float16_t
* __base
, uint16x8_t __offset
, float16x8_t __value
, mve_pred16_t __p
)
34824 __arm_vstrhq_scatter_shifted_offset_p_f16 (__base
, __offset
, __value
, __p
);
34827 __extension__
extern __inline
void
34828 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34829 __arm_vstrwq_scatter_base (uint32x4_t __addr
, const int __offset
, float32x4_t __value
)
34831 __arm_vstrwq_scatter_base_f32 (__addr
, __offset
, __value
);
34834 __extension__
extern __inline
void
34835 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34836 __arm_vstrwq_scatter_base_p (uint32x4_t __addr
, const int __offset
, float32x4_t __value
, mve_pred16_t __p
)
34838 __arm_vstrwq_scatter_base_p_f32 (__addr
, __offset
, __value
, __p
);
34841 __extension__
extern __inline
void
34842 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34843 __arm_vstrwq_scatter_offset (float32_t
* __base
, uint32x4_t __offset
, float32x4_t __value
)
34845 __arm_vstrwq_scatter_offset_f32 (__base
, __offset
, __value
);
34848 __extension__
extern __inline
void
34849 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34850 __arm_vstrwq_scatter_offset_p (float32_t
* __base
, uint32x4_t __offset
, float32x4_t __value
, mve_pred16_t __p
)
34852 __arm_vstrwq_scatter_offset_p_f32 (__base
, __offset
, __value
, __p
);
34855 __extension__
extern __inline
void
34856 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34857 __arm_vstrwq_scatter_shifted_offset (float32_t
* __base
, uint32x4_t __offset
, float32x4_t __value
)
34859 __arm_vstrwq_scatter_shifted_offset_f32 (__base
, __offset
, __value
);
34862 __extension__
extern __inline
void
34863 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34864 __arm_vstrwq_scatter_shifted_offset_p (float32_t
* __base
, uint32x4_t __offset
, float32x4_t __value
, mve_pred16_t __p
)
34866 __arm_vstrwq_scatter_shifted_offset_p_f32 (__base
, __offset
, __value
, __p
);
34869 __extension__
extern __inline float16x8_t
34870 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34871 __arm_vaddq (float16x8_t __a
, float16x8_t __b
)
34873 return __arm_vaddq_f16 (__a
, __b
);
34876 __extension__
extern __inline float32x4_t
34877 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34878 __arm_vaddq (float32x4_t __a
, float32x4_t __b
)
34880 return __arm_vaddq_f32 (__a
, __b
);
34883 __extension__
extern __inline
void
34884 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34885 __arm_vstrwq_scatter_base_wb (uint32x4_t
* __addr
, const int __offset
, float32x4_t __value
)
34887 __arm_vstrwq_scatter_base_wb_f32 (__addr
, __offset
, __value
);
34890 __extension__
extern __inline
void
34891 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34892 __arm_vstrwq_scatter_base_wb_p (uint32x4_t
* __addr
, const int __offset
, float32x4_t __value
, mve_pred16_t __p
)
34894 __arm_vstrwq_scatter_base_wb_p_f32 (__addr
, __offset
, __value
, __p
);
34897 __extension__
extern __inline float16x8_t
34898 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34899 __arm_vminnmq_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34901 return __arm_vminnmq_x_f16 (__a
, __b
, __p
);
34904 __extension__
extern __inline float32x4_t
34905 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34906 __arm_vminnmq_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34908 return __arm_vminnmq_x_f32 (__a
, __b
, __p
);
34911 __extension__
extern __inline float16x8_t
34912 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34913 __arm_vmaxnmq_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34915 return __arm_vmaxnmq_x_f16 (__a
, __b
, __p
);
34918 __extension__
extern __inline float32x4_t
34919 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34920 __arm_vmaxnmq_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34922 return __arm_vmaxnmq_x_f32 (__a
, __b
, __p
);
34925 __extension__
extern __inline float16x8_t
34926 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34927 __arm_vabdq_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34929 return __arm_vabdq_x_f16 (__a
, __b
, __p
);
34932 __extension__
extern __inline float32x4_t
34933 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34934 __arm_vabdq_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34936 return __arm_vabdq_x_f32 (__a
, __b
, __p
);
34939 __extension__
extern __inline float16x8_t
34940 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34941 __arm_vabsq_x (float16x8_t __a
, mve_pred16_t __p
)
34943 return __arm_vabsq_x_f16 (__a
, __p
);
34946 __extension__
extern __inline float32x4_t
34947 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34948 __arm_vabsq_x (float32x4_t __a
, mve_pred16_t __p
)
34950 return __arm_vabsq_x_f32 (__a
, __p
);
34953 __extension__
extern __inline float16x8_t
34954 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34955 __arm_vaddq_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34957 return __arm_vaddq_x_f16 (__a
, __b
, __p
);
34960 __extension__
extern __inline float32x4_t
34961 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34962 __arm_vaddq_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
34964 return __arm_vaddq_x_f32 (__a
, __b
, __p
);
34967 __extension__
extern __inline float16x8_t
34968 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34969 __arm_vaddq_x (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
34971 return __arm_vaddq_x_n_f16 (__a
, __b
, __p
);
34974 __extension__
extern __inline float32x4_t
34975 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34976 __arm_vaddq_x (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
34978 return __arm_vaddq_x_n_f32 (__a
, __b
, __p
);
34981 __extension__
extern __inline float16x8_t
34982 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34983 __arm_vnegq_x (float16x8_t __a
, mve_pred16_t __p
)
34985 return __arm_vnegq_x_f16 (__a
, __p
);
34988 __extension__
extern __inline float32x4_t
34989 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34990 __arm_vnegq_x (float32x4_t __a
, mve_pred16_t __p
)
34992 return __arm_vnegq_x_f32 (__a
, __p
);
34995 __extension__
extern __inline float16x8_t
34996 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
34997 __arm_vmulq_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
34999 return __arm_vmulq_x_f16 (__a
, __b
, __p
);
35002 __extension__
extern __inline float32x4_t
35003 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35004 __arm_vmulq_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35006 return __arm_vmulq_x_f32 (__a
, __b
, __p
);
35009 __extension__
extern __inline float16x8_t
35010 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35011 __arm_vmulq_x (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
35013 return __arm_vmulq_x_n_f16 (__a
, __b
, __p
);
35016 __extension__
extern __inline float32x4_t
35017 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35018 __arm_vmulq_x (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
35020 return __arm_vmulq_x_n_f32 (__a
, __b
, __p
);
35023 __extension__
extern __inline float16x8_t
35024 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35025 __arm_vsubq_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
35027 return __arm_vsubq_x_f16 (__a
, __b
, __p
);
35030 __extension__
extern __inline float32x4_t
35031 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35032 __arm_vsubq_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35034 return __arm_vsubq_x_f32 (__a
, __b
, __p
);
35037 __extension__
extern __inline float16x8_t
35038 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35039 __arm_vsubq_x (float16x8_t __a
, float16_t __b
, mve_pred16_t __p
)
35041 return __arm_vsubq_x_n_f16 (__a
, __b
, __p
);
35044 __extension__
extern __inline float32x4_t
35045 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35046 __arm_vsubq_x (float32x4_t __a
, float32_t __b
, mve_pred16_t __p
)
35048 return __arm_vsubq_x_n_f32 (__a
, __b
, __p
);
35051 __extension__
extern __inline float16x8_t
35052 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35053 __arm_vcaddq_rot90_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
35055 return __arm_vcaddq_rot90_x_f16 (__a
, __b
, __p
);
35058 __extension__
extern __inline float32x4_t
35059 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35060 __arm_vcaddq_rot90_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35062 return __arm_vcaddq_rot90_x_f32 (__a
, __b
, __p
);
35065 __extension__
extern __inline float16x8_t
35066 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35067 __arm_vcaddq_rot270_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
35069 return __arm_vcaddq_rot270_x_f16 (__a
, __b
, __p
);
35072 __extension__
extern __inline float32x4_t
35073 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35074 __arm_vcaddq_rot270_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35076 return __arm_vcaddq_rot270_x_f32 (__a
, __b
, __p
);
35079 __extension__
extern __inline float16x8_t
35080 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35081 __arm_vcmulq_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
35083 return __arm_vcmulq_x_f16 (__a
, __b
, __p
);
35086 __extension__
extern __inline float32x4_t
35087 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35088 __arm_vcmulq_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35090 return __arm_vcmulq_x_f32 (__a
, __b
, __p
);
35093 __extension__
extern __inline float16x8_t
35094 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35095 __arm_vcmulq_rot90_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
35097 return __arm_vcmulq_rot90_x_f16 (__a
, __b
, __p
);
35100 __extension__
extern __inline float32x4_t
35101 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35102 __arm_vcmulq_rot90_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35104 return __arm_vcmulq_rot90_x_f32 (__a
, __b
, __p
);
35107 __extension__
extern __inline float16x8_t
35108 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35109 __arm_vcmulq_rot180_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
35111 return __arm_vcmulq_rot180_x_f16 (__a
, __b
, __p
);
35114 __extension__
extern __inline float32x4_t
35115 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35116 __arm_vcmulq_rot180_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35118 return __arm_vcmulq_rot180_x_f32 (__a
, __b
, __p
);
35121 __extension__
extern __inline float16x8_t
35122 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35123 __arm_vcmulq_rot270_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
35125 return __arm_vcmulq_rot270_x_f16 (__a
, __b
, __p
);
35128 __extension__
extern __inline float32x4_t
35129 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35130 __arm_vcmulq_rot270_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35132 return __arm_vcmulq_rot270_x_f32 (__a
, __b
, __p
);
35135 __extension__
extern __inline float16x8_t
35136 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35137 __arm_vcvtq_x (uint16x8_t __a
, mve_pred16_t __p
)
35139 return __arm_vcvtq_x_f16_u16 (__a
, __p
);
35142 __extension__
extern __inline float16x8_t
35143 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35144 __arm_vcvtq_x (int16x8_t __a
, mve_pred16_t __p
)
35146 return __arm_vcvtq_x_f16_s16 (__a
, __p
);
35149 __extension__
extern __inline float32x4_t
35150 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35151 __arm_vcvtq_x (int32x4_t __a
, mve_pred16_t __p
)
35153 return __arm_vcvtq_x_f32_s32 (__a
, __p
);
35156 __extension__
extern __inline float32x4_t
35157 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35158 __arm_vcvtq_x (uint32x4_t __a
, mve_pred16_t __p
)
35160 return __arm_vcvtq_x_f32_u32 (__a
, __p
);
35163 __extension__
extern __inline float16x8_t
35164 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35165 __arm_vcvtq_x_n (int16x8_t __a
, const int __imm6
, mve_pred16_t __p
)
35167 return __arm_vcvtq_x_n_f16_s16 (__a
, __imm6
, __p
);
35170 __extension__
extern __inline float16x8_t
35171 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35172 __arm_vcvtq_x_n (uint16x8_t __a
, const int __imm6
, mve_pred16_t __p
)
35174 return __arm_vcvtq_x_n_f16_u16 (__a
, __imm6
, __p
);
35177 __extension__
extern __inline float32x4_t
35178 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35179 __arm_vcvtq_x_n (int32x4_t __a
, const int __imm6
, mve_pred16_t __p
)
35181 return __arm_vcvtq_x_n_f32_s32 (__a
, __imm6
, __p
);
35184 __extension__
extern __inline float32x4_t
35185 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35186 __arm_vcvtq_x_n (uint32x4_t __a
, const int __imm6
, mve_pred16_t __p
)
35188 return __arm_vcvtq_x_n_f32_u32 (__a
, __imm6
, __p
);
35191 __extension__
extern __inline float16x8_t
35192 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35193 __arm_vrndq_x (float16x8_t __a
, mve_pred16_t __p
)
35195 return __arm_vrndq_x_f16 (__a
, __p
);
35198 __extension__
extern __inline float32x4_t
35199 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35200 __arm_vrndq_x (float32x4_t __a
, mve_pred16_t __p
)
35202 return __arm_vrndq_x_f32 (__a
, __p
);
35205 __extension__
extern __inline float16x8_t
35206 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35207 __arm_vrndnq_x (float16x8_t __a
, mve_pred16_t __p
)
35209 return __arm_vrndnq_x_f16 (__a
, __p
);
35212 __extension__
extern __inline float32x4_t
35213 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35214 __arm_vrndnq_x (float32x4_t __a
, mve_pred16_t __p
)
35216 return __arm_vrndnq_x_f32 (__a
, __p
);
35219 __extension__
extern __inline float16x8_t
35220 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35221 __arm_vrndmq_x (float16x8_t __a
, mve_pred16_t __p
)
35223 return __arm_vrndmq_x_f16 (__a
, __p
);
35226 __extension__
extern __inline float32x4_t
35227 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35228 __arm_vrndmq_x (float32x4_t __a
, mve_pred16_t __p
)
35230 return __arm_vrndmq_x_f32 (__a
, __p
);
35233 __extension__
extern __inline float16x8_t
35234 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35235 __arm_vrndpq_x (float16x8_t __a
, mve_pred16_t __p
)
35237 return __arm_vrndpq_x_f16 (__a
, __p
);
35240 __extension__
extern __inline float32x4_t
35241 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35242 __arm_vrndpq_x (float32x4_t __a
, mve_pred16_t __p
)
35244 return __arm_vrndpq_x_f32 (__a
, __p
);
35247 __extension__
extern __inline float16x8_t
35248 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35249 __arm_vrndaq_x (float16x8_t __a
, mve_pred16_t __p
)
35251 return __arm_vrndaq_x_f16 (__a
, __p
);
35254 __extension__
extern __inline float32x4_t
35255 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35256 __arm_vrndaq_x (float32x4_t __a
, mve_pred16_t __p
)
35258 return __arm_vrndaq_x_f32 (__a
, __p
);
35261 __extension__
extern __inline float16x8_t
35262 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35263 __arm_vrndxq_x (float16x8_t __a
, mve_pred16_t __p
)
35265 return __arm_vrndxq_x_f16 (__a
, __p
);
35268 __extension__
extern __inline float32x4_t
35269 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35270 __arm_vrndxq_x (float32x4_t __a
, mve_pred16_t __p
)
35272 return __arm_vrndxq_x_f32 (__a
, __p
);
35275 __extension__
extern __inline float16x8_t
35276 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35277 __arm_vandq_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
35279 return __arm_vandq_x_f16 (__a
, __b
, __p
);
35282 __extension__
extern __inline float32x4_t
35283 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35284 __arm_vandq_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35286 return __arm_vandq_x_f32 (__a
, __b
, __p
);
35289 __extension__
extern __inline float16x8_t
35290 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35291 __arm_vbicq_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
35293 return __arm_vbicq_x_f16 (__a
, __b
, __p
);
35296 __extension__
extern __inline float32x4_t
35297 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35298 __arm_vbicq_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35300 return __arm_vbicq_x_f32 (__a
, __b
, __p
);
35303 __extension__
extern __inline float16x8_t
35304 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35305 __arm_vbrsrq_x (float16x8_t __a
, int32_t __b
, mve_pred16_t __p
)
35307 return __arm_vbrsrq_x_n_f16 (__a
, __b
, __p
);
35310 __extension__
extern __inline float32x4_t
35311 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35312 __arm_vbrsrq_x (float32x4_t __a
, int32_t __b
, mve_pred16_t __p
)
35314 return __arm_vbrsrq_x_n_f32 (__a
, __b
, __p
);
35317 __extension__
extern __inline float16x8_t
35318 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35319 __arm_veorq_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
35321 return __arm_veorq_x_f16 (__a
, __b
, __p
);
35324 __extension__
extern __inline float32x4_t
35325 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35326 __arm_veorq_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35328 return __arm_veorq_x_f32 (__a
, __b
, __p
);
35331 __extension__
extern __inline float16x8_t
35332 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35333 __arm_vornq_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
35335 return __arm_vornq_x_f16 (__a
, __b
, __p
);
35338 __extension__
extern __inline float32x4_t
35339 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35340 __arm_vornq_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35342 return __arm_vornq_x_f32 (__a
, __b
, __p
);
35345 __extension__
extern __inline float16x8_t
35346 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35347 __arm_vorrq_x (float16x8_t __a
, float16x8_t __b
, mve_pred16_t __p
)
35349 return __arm_vorrq_x_f16 (__a
, __b
, __p
);
35352 __extension__
extern __inline float32x4_t
35353 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35354 __arm_vorrq_x (float32x4_t __a
, float32x4_t __b
, mve_pred16_t __p
)
35356 return __arm_vorrq_x_f32 (__a
, __b
, __p
);
35359 __extension__
extern __inline float16x8_t
35360 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35361 __arm_vrev32q_x (float16x8_t __a
, mve_pred16_t __p
)
35363 return __arm_vrev32q_x_f16 (__a
, __p
);
35366 __extension__
extern __inline float16x8_t
35367 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35368 __arm_vrev64q_x (float16x8_t __a
, mve_pred16_t __p
)
35370 return __arm_vrev64q_x_f16 (__a
, __p
);
35373 __extension__
extern __inline float32x4_t
35374 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35375 __arm_vrev64q_x (float32x4_t __a
, mve_pred16_t __p
)
35377 return __arm_vrev64q_x_f32 (__a
, __p
);
35380 __extension__
extern __inline float16x8x4_t
35381 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35382 __arm_vld4q (float16_t
const * __addr
)
35384 return __arm_vld4q_f16 (__addr
);
35387 __extension__
extern __inline float16x8x2_t
35388 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35389 __arm_vld2q (float16_t
const * __addr
)
35391 return __arm_vld2q_f16 (__addr
);
35394 __extension__
extern __inline float16x8_t
35395 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35396 __arm_vld1q_z (float16_t
const *__base
, mve_pred16_t __p
)
35398 return __arm_vld1q_z_f16 (__base
, __p
);
35401 __extension__
extern __inline
void
35402 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35403 __arm_vst2q (float16_t
* __addr
, float16x8x2_t __value
)
35405 __arm_vst2q_f16 (__addr
, __value
);
35408 __extension__
extern __inline
void
35409 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35410 __arm_vst1q_p (float16_t
* __addr
, float16x8_t __value
, mve_pred16_t __p
)
35412 __arm_vst1q_p_f16 (__addr
, __value
, __p
);
35415 __extension__
extern __inline float32x4x4_t
35416 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35417 __arm_vld4q (float32_t
const * __addr
)
35419 return __arm_vld4q_f32 (__addr
);
35422 __extension__
extern __inline float32x4x2_t
35423 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35424 __arm_vld2q (float32_t
const * __addr
)
35426 return __arm_vld2q_f32 (__addr
);
35429 __extension__
extern __inline float32x4_t
35430 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35431 __arm_vld1q_z (float32_t
const *__base
, mve_pred16_t __p
)
35433 return __arm_vld1q_z_f32 (__base
, __p
);
35436 __extension__
extern __inline
void
35437 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35438 __arm_vst2q (float32_t
* __addr
, float32x4x2_t __value
)
35440 __arm_vst2q_f32 (__addr
, __value
);
35443 __extension__
extern __inline
void
35444 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35445 __arm_vst1q_p (float32_t
* __addr
, float32x4_t __value
, mve_pred16_t __p
)
35447 __arm_vst1q_p_f32 (__addr
, __value
, __p
);
35450 __extension__
extern __inline float16x8_t
35451 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35452 __arm_vsetq_lane (float16_t __a
, float16x8_t __b
, const int __idx
)
35454 return __arm_vsetq_lane_f16 (__a
, __b
, __idx
);
35457 __extension__
extern __inline float32x4_t
35458 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35459 __arm_vsetq_lane (float32_t __a
, float32x4_t __b
, const int __idx
)
35461 return __arm_vsetq_lane_f32 (__a
, __b
, __idx
);
35464 __extension__
extern __inline float16_t
35465 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35466 __arm_vgetq_lane (float16x8_t __a
, const int __idx
)
35468 return __arm_vgetq_lane_f16 (__a
, __idx
);
35471 __extension__
extern __inline float32_t
35472 __attribute__ ((__always_inline__
, __gnu_inline__
, __artificial__
))
35473 __arm_vgetq_lane (float32x4_t __a
, const int __idx
)
35475 return __arm_vgetq_lane_f32 (__a
, __idx
);
35477 #endif /* MVE Floating point. */
35481 __ARM_mve_type_fp_n
= 1,
35482 __ARM_mve_type_int_n
,
35483 __ARM_mve_type_float16_t_ptr
,
35484 __ARM_mve_type_float16x8_t
,
35485 __ARM_mve_type_float16x8x2_t
,
35486 __ARM_mve_type_float16x8x4_t
,
35487 __ARM_mve_type_float32_t_ptr
,
35488 __ARM_mve_type_float32x4_t
,
35489 __ARM_mve_type_float32x4x2_t
,
35490 __ARM_mve_type_float32x4x4_t
,
35491 __ARM_mve_type_int16_t_ptr
,
35492 __ARM_mve_type_int16x8_t
,
35493 __ARM_mve_type_int16x8x2_t
,
35494 __ARM_mve_type_int16x8x4_t
,
35495 __ARM_mve_type_int32_t_ptr
,
35496 __ARM_mve_type_int32x4_t
,
35497 __ARM_mve_type_int32x4x2_t
,
35498 __ARM_mve_type_int32x4x4_t
,
35499 __ARM_mve_type_int64_t_ptr
,
35500 __ARM_mve_type_int64x2_t
,
35501 __ARM_mve_type_int8_t_ptr
,
35502 __ARM_mve_type_int8x16_t
,
35503 __ARM_mve_type_int8x16x2_t
,
35504 __ARM_mve_type_int8x16x4_t
,
35505 __ARM_mve_type_uint16_t_ptr
,
35506 __ARM_mve_type_uint16x8_t
,
35507 __ARM_mve_type_uint16x8x2_t
,
35508 __ARM_mve_type_uint16x8x4_t
,
35509 __ARM_mve_type_uint32_t_ptr
,
35510 __ARM_mve_type_uint32x4_t
,
35511 __ARM_mve_type_uint32x4x2_t
,
35512 __ARM_mve_type_uint32x4x4_t
,
35513 __ARM_mve_type_uint64_t_ptr
,
35514 __ARM_mve_type_uint64x2_t
,
35515 __ARM_mve_type_uint8_t_ptr
,
35516 __ARM_mve_type_uint8x16_t
,
35517 __ARM_mve_type_uint8x16x2_t
,
35518 __ARM_mve_type_uint8x16x4_t
,
35519 __ARM_mve_unsupported_type
35522 #if (__ARM_FEATURE_MVE & 2) /* MVE Floating point. */
35523 #define __ARM_mve_typeid(x) _Generic(x, \
35524 float16_t: __ARM_mve_type_fp_n, \
35525 float16_t *: __ARM_mve_type_float16_t_ptr, \
35526 float16_t const *: __ARM_mve_type_float16_t_ptr, \
35527 float16x8_t: __ARM_mve_type_float16x8_t, \
35528 float16x8x2_t: __ARM_mve_type_float16x8x2_t, \
35529 float16x8x4_t: __ARM_mve_type_float16x8x4_t, \
35530 float32_t: __ARM_mve_type_fp_n, \
35531 float32_t *: __ARM_mve_type_float32_t_ptr, \
35532 float32_t const *: __ARM_mve_type_float32_t_ptr, \
35533 float32x4_t: __ARM_mve_type_float32x4_t, \
35534 float32x4x2_t: __ARM_mve_type_float32x4x2_t, \
35535 float32x4x4_t: __ARM_mve_type_float32x4x4_t, \
35536 int16_t: __ARM_mve_type_int_n, \
35537 int16_t *: __ARM_mve_type_int16_t_ptr, \
35538 int16_t const *: __ARM_mve_type_int16_t_ptr, \
35539 int16x8_t: __ARM_mve_type_int16x8_t, \
35540 int16x8x2_t: __ARM_mve_type_int16x8x2_t, \
35541 int16x8x4_t: __ARM_mve_type_int16x8x4_t, \
35542 int32_t: __ARM_mve_type_int_n, \
35543 int32_t *: __ARM_mve_type_int32_t_ptr, \
35544 int32_t const *: __ARM_mve_type_int32_t_ptr, \
35545 int32x4_t: __ARM_mve_type_int32x4_t, \
35546 int32x4x2_t: __ARM_mve_type_int32x4x2_t, \
35547 int32x4x4_t: __ARM_mve_type_int32x4x4_t, \
35548 int64_t: __ARM_mve_type_int_n, \
35549 int64_t *: __ARM_mve_type_int64_t_ptr, \
35550 int64_t const *: __ARM_mve_type_int64_t_ptr, \
35551 int64x2_t: __ARM_mve_type_int64x2_t, \
35552 int8_t: __ARM_mve_type_int_n, \
35553 int8_t *: __ARM_mve_type_int8_t_ptr, \
35554 int8_t const *: __ARM_mve_type_int8_t_ptr, \
35555 int8x16_t: __ARM_mve_type_int8x16_t, \
35556 int8x16x2_t: __ARM_mve_type_int8x16x2_t, \
35557 int8x16x4_t: __ARM_mve_type_int8x16x4_t, \
35558 uint16_t: __ARM_mve_type_int_n, \
35559 uint16_t *: __ARM_mve_type_uint16_t_ptr, \
35560 uint16_t const *: __ARM_mve_type_uint16_t_ptr, \
35561 uint16x8_t: __ARM_mve_type_uint16x8_t, \
35562 uint16x8x2_t: __ARM_mve_type_uint16x8x2_t, \
35563 uint16x8x4_t: __ARM_mve_type_uint16x8x4_t, \
35564 uint32_t: __ARM_mve_type_int_n, \
35565 uint32_t *: __ARM_mve_type_uint32_t_ptr, \
35566 uint32_t const *: __ARM_mve_type_uint32_t_ptr, \
35567 uint32x4_t: __ARM_mve_type_uint32x4_t, \
35568 uint32x4x2_t: __ARM_mve_type_uint32x4x2_t, \
35569 uint32x4x4_t: __ARM_mve_type_uint32x4x4_t, \
35570 uint64_t: __ARM_mve_type_int_n, \
35571 uint64_t *: __ARM_mve_type_uint64_t_ptr, \
35572 uint64_t const *: __ARM_mve_type_uint64_t_ptr, \
35573 uint64x2_t: __ARM_mve_type_uint64x2_t, \
35574 uint8_t: __ARM_mve_type_int_n, \
35575 uint8_t *: __ARM_mve_type_uint8_t_ptr, \
35576 uint8_t const *: __ARM_mve_type_uint8_t_ptr, \
35577 uint8x16_t: __ARM_mve_type_uint8x16_t, \
35578 uint8x16x2_t: __ARM_mve_type_uint8x16x2_t, \
35579 uint8x16x4_t: __ARM_mve_type_uint8x16x4_t, \
35580 default: _Generic(x, \
35581 signed char: __ARM_mve_type_int_n, \
35582 short: __ARM_mve_type_int_n, \
35583 int: __ARM_mve_type_int_n, \
35584 long: __ARM_mve_type_int_n, \
35585 _Float16: __ARM_mve_type_fp_n, \
35586 __fp16: __ARM_mve_type_fp_n, \
35587 float: __ARM_mve_type_fp_n, \
35588 double: __ARM_mve_type_fp_n, \
35589 long long: __ARM_mve_type_int_n, \
35590 unsigned char: __ARM_mve_type_int_n, \
35591 unsigned short: __ARM_mve_type_int_n, \
35592 unsigned int: __ARM_mve_type_int_n, \
35593 unsigned long: __ARM_mve_type_int_n, \
35594 unsigned long long: __ARM_mve_type_int_n, \
35595 default: __ARM_mve_unsupported_type))
35597 #define __ARM_mve_typeid(x) _Generic(x, \
35598 int16_t: __ARM_mve_type_int_n, \
35599 int16_t *: __ARM_mve_type_int16_t_ptr, \
35600 int16_t const *: __ARM_mve_type_int16_t_ptr, \
35601 int16x8_t: __ARM_mve_type_int16x8_t, \
35602 int16x8x2_t: __ARM_mve_type_int16x8x2_t, \
35603 int16x8x4_t: __ARM_mve_type_int16x8x4_t, \
35604 int32_t: __ARM_mve_type_int_n, \
35605 int32_t *: __ARM_mve_type_int32_t_ptr, \
35606 int32_t const *: __ARM_mve_type_int32_t_ptr, \
35607 int32x4_t: __ARM_mve_type_int32x4_t, \
35608 int32x4x2_t: __ARM_mve_type_int32x4x2_t, \
35609 int32x4x4_t: __ARM_mve_type_int32x4x4_t, \
35610 int64_t: __ARM_mve_type_int_n, \
35611 int64_t *: __ARM_mve_type_int64_t_ptr, \
35612 int64_t const *: __ARM_mve_type_int64_t_ptr, \
35613 int64x2_t: __ARM_mve_type_int64x2_t, \
35614 int8_t: __ARM_mve_type_int_n, \
35615 int8_t *: __ARM_mve_type_int8_t_ptr, \
35616 int8_t const *: __ARM_mve_type_int8_t_ptr, \
35617 int8x16_t: __ARM_mve_type_int8x16_t, \
35618 int8x16x2_t: __ARM_mve_type_int8x16x2_t, \
35619 int8x16x4_t: __ARM_mve_type_int8x16x4_t, \
35620 uint16_t: __ARM_mve_type_int_n, \
35621 uint16_t *: __ARM_mve_type_uint16_t_ptr, \
35622 uint16_t const *: __ARM_mve_type_uint16_t_ptr, \
35623 uint16x8_t: __ARM_mve_type_uint16x8_t, \
35624 uint16x8x2_t: __ARM_mve_type_uint16x8x2_t, \
35625 uint16x8x4_t: __ARM_mve_type_uint16x8x4_t, \
35626 uint32_t: __ARM_mve_type_int_n, \
35627 uint32_t *: __ARM_mve_type_uint32_t_ptr, \
35628 uint32_t const *: __ARM_mve_type_uint32_t_ptr, \
35629 uint32x4_t: __ARM_mve_type_uint32x4_t, \
35630 uint32x4x2_t: __ARM_mve_type_uint32x4x2_t, \
35631 uint32x4x4_t: __ARM_mve_type_uint32x4x4_t, \
35632 uint64_t: __ARM_mve_type_int_n, \
35633 uint64_t *: __ARM_mve_type_uint64_t_ptr, \
35634 uint64_t const *: __ARM_mve_type_uint64_t_ptr, \
35635 uint64x2_t: __ARM_mve_type_uint64x2_t, \
35636 uint8_t: __ARM_mve_type_int_n, \
35637 uint8_t *: __ARM_mve_type_uint8_t_ptr, \
35638 uint8_t const *: __ARM_mve_type_uint8_t_ptr, \
35639 uint8x16_t: __ARM_mve_type_uint8x16_t, \
35640 uint8x16x2_t: __ARM_mve_type_uint8x16x2_t, \
35641 uint8x16x4_t: __ARM_mve_type_uint8x16x4_t, \
35642 default: _Generic(x, \
35643 signed char: __ARM_mve_type_int_n, \
35644 short: __ARM_mve_type_int_n, \
35645 int: __ARM_mve_type_int_n, \
35646 long: __ARM_mve_type_int_n, \
35647 long long: __ARM_mve_type_int_n, \
35648 unsigned char: __ARM_mve_type_int_n, \
35649 unsigned short: __ARM_mve_type_int_n, \
35650 unsigned int: __ARM_mve_type_int_n, \
35651 unsigned long: __ARM_mve_type_int_n, \
35652 unsigned long long: __ARM_mve_type_int_n, \
35653 default: __ARM_mve_unsupported_type))
35654 #endif /* MVE Floating point. */
/* __ARM_undef is never defined; dereferencing it in a _Generic default
   arm forces a link/compile-time diagnostic for unsupported argument
   types.  The coerce helpers pass a value through when its type
   matches, and additionally accept (coerce1) const-qualified values,
   (coerce2) float16_t/float32_t scalars, or (coerce3) any fixed-width
   integer scalar.  */
extern void *__ARM_undef;
#define __ARM_mve_coerce(param, type) \
    _Generic(param, type: param, default: *(type *)__ARM_undef)
#define __ARM_mve_coerce1(param, type) \
    _Generic(param, type: param, const type: param, default: *(type *)__ARM_undef)
#define __ARM_mve_coerce2(param, type) \
    _Generic(param, type: param, float16_t: param, float32_t: param, default: *(type *)__ARM_undef)
#define __ARM_mve_coerce3(param, type) \
    _Generic(param, type: param, int8_t: param, int16_t: param, int32_t: param, int64_t: param, uint8_t: param, uint16_t: param, uint32_t: param, uint64_t: param, default: *(type *)__ARM_undef)
35666 #if (__ARM_FEATURE_MVE & 2) /* MVE Floating point. */
35668 #define __arm_vst4q(p0,p1) ({ __typeof(p0) __p0 = (p0); \
35669 __typeof(p1) __p1 = (p1); \
35670 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
35671 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int8x16x4_t]: __arm_vst4q_s8 (__ARM_mve_coerce(__p0, int8_t *), __ARM_mve_coerce(__p1, int8x16x4_t)), \
35672 int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int16x8x4_t]: __arm_vst4q_s16 (__ARM_mve_coerce(__p0, int16_t *), __ARM_mve_coerce(__p1, int16x8x4_t)), \
35673 int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4x4_t]: __arm_vst4q_s32 (__ARM_mve_coerce(__p0, int32_t *), __ARM_mve_coerce(__p1, int32x4x4_t)), \
35674 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16x4_t]: __arm_vst4q_u8 (__ARM_mve_coerce(__p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16x4_t)), \
35675 int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8x4_t]: __arm_vst4q_u16 (__ARM_mve_coerce(__p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8x4_t)), \
35676 int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4x4_t]: __arm_vst4q_u32 (__ARM_mve_coerce(__p0, uint32_t *), __ARM_mve_coerce(__p1, uint32x4x4_t)), \
35677 int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_float16x8x4_t]: __arm_vst4q_f16 (__ARM_mve_coerce(__p0, float16_t *), __ARM_mve_coerce(__p1, float16x8x4_t)), \
35678 int (*)[__ARM_mve_type_float32_t_ptr][__ARM_mve_type_float32x4x4_t]: __arm_vst4q_f32 (__ARM_mve_coerce(__p0, float32_t *), __ARM_mve_coerce(__p1, float32x4x4_t)));})
/* Polymorphic unpredicated rounding macros: dispatch on the single
   vector argument's type (f16 or f32 only).  */
#define __arm_vrndxq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vrndxq_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t]: __arm_vrndxq_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})

#define __arm_vrndq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vrndq_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t]: __arm_vrndq_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})

#define __arm_vrndpq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vrndpq_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t]: __arm_vrndpq_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})

#define __arm_vrndnq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vrndnq_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t]: __arm_vrndnq_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})

#define __arm_vrndmq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vrndmq_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t]: __arm_vrndmq_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})

#define __arm_vrndaq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vrndaq_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t]: __arm_vrndaq_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})
/* Polymorphic vrev64q (all element types), vnegq (signed int + float),
   and vdupq_n.  NOTE(review): this vdupq_n dispatches on a *vector*
   argument (float16x8_t/float32x4_t) rather than a scalar — preserved
   as-is; confirm against the intended ACLE signature.  */
#define __arm_vrev64q(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vrev64q_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vrev64q_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vrev64q_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrev64q_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrev64q_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vrev64q_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vrev64q_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t]: __arm_vrev64q_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})

#define __arm_vnegq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vnegq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vnegq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vnegq_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vnegq_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t]: __arm_vnegq_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})

#define __arm_vdupq_n(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vdupq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t]: __arm_vdupq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})
/* Polymorphic vabsq (signed int + float), vrev32q (8/16-bit element
   types + f16), the f16->f32 widening conversions vcvtbq_f32/vcvttq_f32
   (bottom/top half-lanes), and vrev16q (8-bit element types only).  */
#define __arm_vabsq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vabsq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vabsq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vabsq_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vabsq_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t]: __arm_vabsq_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})

#define __arm_vrev32q(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vrev32q_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vrev32q_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrev32q_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrev32q_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vrev32q_f16 (__ARM_mve_coerce(__p0, float16x8_t)));})

#define __arm_vcvtbq_f32(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vcvtbq_f32_f16 (__ARM_mve_coerce(__p0, float16x8_t)));})

#define __arm_vcvttq_f32(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_float16x8_t]: __arm_vcvttq_f32_f16 (__ARM_mve_coerce(__p0, float16x8_t)));})

#define __arm_vrev16q(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vrev16q_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrev16q_u8 (__ARM_mve_coerce(__p0, uint8x16_t)));})
/* Polymorphic saturating absolute value / negation (signed types only)
   and bitwise NOT (all integer vector types).  */
#define __arm_vqabsq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vqabsq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vqabsq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vqabsq_s32 (__ARM_mve_coerce(__p0, int32x4_t)));})

#define __arm_vqnegq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vqnegq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vqnegq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vqnegq_s32 (__ARM_mve_coerce(__p0, int32x4_t)));})

#define __arm_vmvnq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vmvnq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vmvnq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vmvnq_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vmvnq_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vmvnq_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vmvnq_u32 (__ARM_mve_coerce(__p0, uint32x4_t)));})
/* Polymorphic widening moves (vmovlbq/vmovltq: bottom/top half of the
   8- or 16-bit input), count-leading-zeros (all integer types) and
   count-leading-sign-bits (signed types only).  */
#define __arm_vmovlbq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vmovlbq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vmovlbq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vmovlbq_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vmovlbq_u16 (__ARM_mve_coerce(__p0, uint16x8_t)));})

#define __arm_vmovltq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vmovltq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vmovltq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vmovltq_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vmovltq_u16 (__ARM_mve_coerce(__p0, uint16x8_t)));})

#define __arm_vclzq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vclzq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vclzq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vclzq_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vclzq_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vclzq_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vclzq_u32 (__ARM_mve_coerce(__p0, uint32x4_t)));})

#define __arm_vclsq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vclsq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vclsq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vclsq_s32 (__ARM_mve_coerce(__p0, int32x4_t)));})
/* Polymorphic int-to-float conversion (vcvtq), element shifts (vshlq
   by a signed vector; vshrq by an immediate — p1 passed unevaluated so
   it stays an integer constant expression), and fixed-point conversion
   with an immediate fraction-bit count (vcvtq_n, same unevaluated-p1
   reasoning).  */
#define __arm_vcvtq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vcvtq_f16_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vcvtq_f32_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vcvtq_f16_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vcvtq_f32_u32 (__ARM_mve_coerce(__p0, uint32x4_t)));})

#define __arm_vshlq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vshlq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vshlq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vshlq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vshlq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vshlq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vshlq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})

#define __arm_vshrq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vshrq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vshrq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vshrq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshrq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshrq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshrq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})

#define __arm_vcvtq_n(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vcvtq_n_f16_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vcvtq_n_f32_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vcvtq_n_f16_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vcvtq_n_f32_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* Polymorphic bitwise OR over all integer and float vector type
   pairs.  */
#define __arm_vorrq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vorrq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vorrq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vorrq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vorrq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vorrq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vorrq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vorrq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vorrq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vabdq: polymorphic absolute difference.  Single-evaluation copies
   of both operands, then type-pair dispatch to the integer (s/u 8/16/32)
   and float (f16/f32) variants.  */
35858 #define __arm_vabdq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
35859 __typeof(p1) __p1 = (p1); \
35860 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
35861 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vabdq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
35862 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vabdq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
35863 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vabdq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
35864 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vabdq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
35865 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vabdq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
35866 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vabdq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
35867 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vabdq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
35868 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vabdq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vaddq: polymorphic vector addition.  Supports vector+vector
   (integer and float lanes) and vector+scalar "_n" forms.  For the
   scalar forms the raw p1 is passed through __ARM_mve_coerce3 (integer)
   or __ARM_mve_coerce2 (floating point, via double) rather than the
   __p1 copy, so literal constants convert as intended.  */
35870 #define __arm_vaddq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
35871 __typeof(p1) __p1 = (p1); \
35872 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
35873 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vaddq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
35874 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vaddq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
35875 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vaddq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
35876 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vaddq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
35877 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vaddq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
35878 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vaddq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
35879 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vaddq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
35880 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vaddq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)), \
35881 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vaddq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
35882 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vaddq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
35883 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vaddq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
35884 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vaddq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
35885 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vaddq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
35886 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vaddq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
35887 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vaddq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double)), \
35888 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vaddq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double)));})
/* __arm_vandq: polymorphic bitwise AND.  Single-evaluation copies of
   both operands, then type-pair dispatch to the integer (s/u 8/16/32)
   and float (f16/f32) variants.  */
35890 #define __arm_vandq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
35891 __typeof(p1) __p1 = (p1); \
35892 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
35893 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vandq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
35894 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vandq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
35895 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vandq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
35896 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vandq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
35897 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vandq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
35898 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vandq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
35899 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vandq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
35900 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vandq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vbicq: polymorphic bit-clear (p0 & ~p1).  Scalar "_n" forms
   exist only for 16/32-bit lanes and route __p1 through
   __ARM_mve_coerce1 (presumably to keep the operand usable as the
   instruction's immediate -- NOTE(review): confirm against
   __ARM_mve_coerce1's definition); vector forms cover s/u 8/16/32 and
   f16/f32.  */
35902 #define __arm_vbicq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
35903 __typeof(p1) __p1 = (p1); \
35904 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
35905 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vbicq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce1 (__p1, int)), \
35906 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vbicq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce1 (__p1, int)), \
35907 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vbicq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce1 (__p1, int)), \
35908 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vbicq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce1 (__p1, int)), \
35909 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vbicq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
35910 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vbicq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
35911 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vbicq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
35912 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vbicq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
35913 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vbicq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
35914 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vbicq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
35915 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vbicq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
35916 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vbicq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vornq: polymorphic OR-NOT (p0 | ~p1).  Single-evaluation copies
   of both operands, then type-pair dispatch to the integer (s/u 8/16/32)
   and float (f16/f32) variants.  */
35918 #define __arm_vornq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
35919 __typeof(p1) __p1 = (p1); \
35920 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
35921 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vornq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
35922 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vornq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
35923 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vornq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
35924 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vornq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
35925 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vornq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
35926 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vornq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
35927 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vornq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
35928 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vornq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vmulq: polymorphic multiply.  Vector*scalar "_n" forms are
   listed first (raw p1 via __ARM_mve_coerce3 for integers,
   __ARM_mve_coerce2/double for floats, bypassing the __p1 copy so
   literals convert), followed by the vector*vector integer and float
   forms.  */
35930 #define __arm_vmulq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
35931 __typeof(p1) __p1 = (p1); \
35932 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
35933 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vmulq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
35934 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmulq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
35935 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmulq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
35936 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vmulq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
35937 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmulq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
35938 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmulq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
35939 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vmulq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double)), \
35940 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vmulq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double)), \
35941 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
35942 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
35943 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
35944 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
35945 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
35946 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
35947 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vmulq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
35948 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vmulq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vcaddq_rot270: polymorphic complex add with 270-degree rotation.
   Type-pair dispatch to the s/u 8/16/32 and f16/f32 variants, with both
   operands evaluated exactly once.  */
35950 #define __arm_vcaddq_rot270(p0,p1) ({ __typeof(p0) __p0 = (p0); \
35951 __typeof(p1) __p1 = (p1); \
35952 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
35953 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcaddq_rot270_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
35954 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcaddq_rot270_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
35955 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcaddq_rot270_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
35956 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcaddq_rot270_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
35957 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcaddq_rot270_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
35958 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcaddq_rot270_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
35959 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcaddq_rot270_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
35960 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcaddq_rot270_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vcmpeqq: polymorphic compare-equal.  Vector-vs-scalar "_n" forms
   come first (raw p1 through __ARM_mve_coerce3/__ARM_mve_coerce2 so
   literal scalars convert), then the vector-vs-vector integer and float
   forms.  */
35962 #define __arm_vcmpeqq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
35963 __typeof(p1) __p1 = (p1); \
35964 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
35965 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
35966 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
35967 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
35968 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
35969 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
35970 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
35971 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vcmpeqq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double)), \
35972 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vcmpeqq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double)), \
35973 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpeqq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
35974 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpeqq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
35975 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpeqq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
35976 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmpeqq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
35977 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmpeqq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
35978 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmpeqq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
35979 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmpeqq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
35980 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmpeqq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vcaddq_rot90: polymorphic complex add with 90-degree rotation.
   Same dispatch shape as __arm_vcaddq_rot270: type-pair selection over
   s/u 8/16/32 and f16/f32 with single-evaluation operands.  */
35982 #define __arm_vcaddq_rot90(p0,p1) ({ __typeof(p0) __p0 = (p0); \
35983 __typeof(p1) __p1 = (p1); \
35984 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
35985 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcaddq_rot90_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
35986 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcaddq_rot90_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
35987 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcaddq_rot90_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
35988 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcaddq_rot90_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
35989 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcaddq_rot90_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
35990 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcaddq_rot90_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
35991 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcaddq_rot90_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
35992 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcaddq_rot90_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vcmpeqq_m: predicated polymorphic compare-equal.  Dispatches on
   the (p0, p1) type pair to vector-vs-vector and vector-vs-scalar "_n"
   forms; the predicate argument p2 is forwarded unexpanded.  Note only
   p0 and p1 get single-evaluation copies here.  */
35994 #define __arm_vcmpeqq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
35995 __typeof(p1) __p1 = (p1); \
35996 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
35997 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpeqq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
35998 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpeqq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
35999 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpeqq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
36000 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmpeqq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
36001 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmpeqq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
36002 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmpeqq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
36003 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
36004 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
36005 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2), \
36006 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int), p2), \
36007 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int), p2), \
36008 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int), p2), \
36009 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmpeqq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36010 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmpeqq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2), \
36011 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vcmpeqq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double), p2), \
36012 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vcmpeqq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double), p2));})
/* __arm_vcmpgtq: polymorphic compare-greater-than.  Signed-integer and
   float variants only (no unsigned entries here); covers both
   vector-vs-vector and vector-vs-scalar "_n" forms.  */
36014 #define __arm_vcmpgtq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36015 __typeof(p1) __p1 = (p1); \
36016 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36017 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpgtq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36018 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpgtq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36019 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpgtq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36020 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpgtq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
36021 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpgtq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36022 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpgtq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
36023 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmpgtq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36024 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmpgtq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)), \
36025 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vcmpgtq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double)), \
36026 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vcmpgtq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double)));})
/* __arm_vcmpleq: polymorphic compare-less-or-equal.  Signed-integer and
   float variants only, in vector-vs-vector and vector-vs-scalar "_n"
   forms; scalar operands use the raw p1 coercion helpers.  */
36028 #define __arm_vcmpleq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36029 __typeof(p1) __p1 = (p1); \
36030 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36031 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpleq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36032 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpleq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36033 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpleq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36034 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmpleq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36035 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmpleq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)), \
36036 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpleq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
36037 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpleq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36038 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpleq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
36039 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vcmpleq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double)), \
36040 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vcmpleq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double)));})
/* __arm_vcmpltq: polymorphic compare-less-than.  Signed-integer and
   float variants only, in vector-vs-vector and vector-vs-scalar "_n"
   forms; scalar operands use the raw p1 coercion helpers.  */
36042 #define __arm_vcmpltq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36043 __typeof(p1) __p1 = (p1); \
36044 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36045 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpltq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36046 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpltq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36047 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpltq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36048 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpltq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
36049 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpltq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36050 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpltq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
36051 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmpltq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36052 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmpltq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)), \
36053 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vcmpltq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double)), \
36054 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vcmpltq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double)));})
/* __arm_vcmpneq: polymorphic compare-not-equal.  Vector-vs-scalar "_n"
   forms listed first (raw p1 through the coercion helpers), followed by
   vector-vs-vector integer (s/u 8/16/32) and float (f16/f32) forms.  */
36056 #define __arm_vcmpneq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36057 __typeof(p1) __p1 = (p1); \
36058 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36059 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpneq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
36060 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpneq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36061 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpneq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
36062 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmpneq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
36063 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmpneq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
36064 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmpneq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
36065 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vcmpneq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double)), \
36066 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vcmpneq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double)), \
36067 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpneq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36068 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpneq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36069 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpneq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36070 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmpneq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
36071 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmpneq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
36072 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmpneq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
36073 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmpneq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36074 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmpneq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vcmulq: polymorphic complex multiply.  Floating-point only:
   dispatches on the (p0, p1) type pair to the f16/f32 variants.  */
36076 #define __arm_vcmulq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36077 __typeof(p1) __p1 = (p1); \
36078 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36079 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmulq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36080 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmulq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vcmulq_rot180: polymorphic complex multiply with 180-degree
   rotation.  Floating-point only (f16/f32).  */
36082 #define __arm_vcmulq_rot180(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36083 __typeof(p1) __p1 = (p1); \
36084 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36085 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmulq_rot180_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36086 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmulq_rot180_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vcmulq_rot270: polymorphic complex multiply with 270-degree
   rotation.  Floating-point only (f16/f32).  */
36088 #define __arm_vcmulq_rot270(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36089 __typeof(p1) __p1 = (p1); \
36090 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36091 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmulq_rot270_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36092 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmulq_rot270_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_vcmulq_rot90: polymorphic complex multiply with 90-degree
   rotation.  Floating-point only (f16/f32).  */
36094 #define __arm_vcmulq_rot90(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36095 __typeof(p1) __p1 = (p1); \
36096 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36097 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmulq_rot90_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36098 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmulq_rot90_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* __arm_veorq: polymorphic bitwise exclusive-OR.  Single-evaluation
   copies of both operands, then type-pair dispatch to the integer
   (s/u 8/16/32) and float (f16/f32) variants.  */
36100 #define __arm_veorq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36101 __typeof(p1) __p1 = (p1); \
36102 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36103 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_veorq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36104 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_veorq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36105 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_veorq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36106 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_veorq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
36107 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_veorq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
36108 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_veorq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
36109 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_veorq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36110 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_veorq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* Type-generic vmaxnmaq: _Generic dispatch on both operands' MVE typeids to
   the f16 or f32 type-specific intrinsic (float-only operation).  */
36112 #define __arm_vmaxnmaq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36113 __typeof(p1) __p1 = (p1); \
36114 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36115 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vmaxnmaq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36116 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vmaxnmaq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* Type-generic vmaxnmavq: p0 is a floating-point scalar (fp_n typeid) and p1
   a float vector.  Note p0 is forwarded unexpanded to __ARM_mve_coerce2
   (rather than via __p0) — presumably to preserve its original category for
   the coercion; confirm against __ARM_mve_coerce2's definition.  */
36118 #define __arm_vmaxnmavq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36119 __typeof(p1) __p1 = (p1); \
36120 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36121 int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float16x8_t]: __arm_vmaxnmavq_f16 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float16x8_t)), \
36122 int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float32x4_t]: __arm_vmaxnmavq_f32 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float32x4_t)));})
/* Type-generic vmaxnmq: _Generic dispatch on both operands' MVE typeids to
   the f16 or f32 type-specific intrinsic (float-only operation).  */
36124 #define __arm_vmaxnmq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36125 __typeof(p1) __p1 = (p1); \
36126 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36127 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vmaxnmq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36128 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vmaxnmq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* Type-generic vmaxnmvq: scalar (fp_n) p0 with a float vector p1, dispatched
   to the f16 or f32 type-specific intrinsic.  p0 is forwarded unexpanded to
   __ARM_mve_coerce2, matching the convention used by vmaxnmavq above.  */
36130 #define __arm_vmaxnmvq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36131 __typeof(p1) __p1 = (p1); \
36132 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36133 int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float16x8_t]: __arm_vmaxnmvq_f16 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float16x8_t)), \
36134 int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float32x4_t]: __arm_vmaxnmvq_f32 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float32x4_t)));})
/* Redundant duplicate definition of __arm_vmaxnmvq removed: a token-identical
   definition appears immediately above.  C permits identical macro
   redefinition, but the duplicate served no purpose and invited divergence if
   only one copy were ever edited.  */
/* Type-generic vminnmaq: _Generic dispatch on both operands' MVE typeids to
   the f16 or f32 type-specific intrinsic (float-only operation).  */
36142 #define __arm_vminnmaq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36143 __typeof(p1) __p1 = (p1); \
36144 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36145 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vminnmaq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36146 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vminnmaq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* Type-generic vminnmavq: scalar (fp_n) p0 with a float vector p1, dispatched
   to the f16 or f32 type-specific intrinsic.  p0 is forwarded unexpanded to
   __ARM_mve_coerce2, matching the other scalar-first reductions here.  */
36148 #define __arm_vminnmavq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36149 __typeof(p1) __p1 = (p1); \
36150 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36151 int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float16x8_t]: __arm_vminnmavq_f16 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float16x8_t)), \
36152 int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float32x4_t]: __arm_vminnmavq_f32 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float32x4_t)));})
/* Type-generic vbrsrq: _Generic dispatch on p0's MVE typeid only; p1 is
   passed through unexpanded to the _n intrinsics (presumably so it stays a
   plain integer expression — confirm against the intrinsic prototypes).
   Covers s8/s16/s32, u8/u16/u32 and f16/f32 vector operands.  */
36154 #define __arm_vbrsrq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36155 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
36156 int (*)[__ARM_mve_type_int8x16_t]: __arm_vbrsrq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
36157 int (*)[__ARM_mve_type_int16x8_t]: __arm_vbrsrq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
36158 int (*)[__ARM_mve_type_int32x4_t]: __arm_vbrsrq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
36159 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vbrsrq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
36160 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vbrsrq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
36161 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vbrsrq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1), \
36162 int (*)[__ARM_mve_type_float16x8_t]: __arm_vbrsrq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), p1), \
36163 int (*)[__ARM_mve_type_float32x4_t]: __arm_vbrsrq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), p1));})
/* Type-generic vminnmq: _Generic dispatch on both operands' MVE typeids to
   the f16 or f32 type-specific intrinsic (float-only operation).  */
36165 #define __arm_vminnmq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36166 __typeof(p1) __p1 = (p1); \
36167 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36168 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vminnmq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36169 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vminnmq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* Type-generic vsubq: dispatches vector/scalar (_n, via __ARM_mve_coerce2 for
   fp_n and __ARM_mve_coerce3 for int_n scalars, both taking the raw p1) and
   vector/vector forms across s8..s32, u8..u32, f16 and f32.  */
36171 #define __arm_vsubq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36172 __typeof(p1) __p1 = (p1); \
36173 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36174 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vsubq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double)), \
36175 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vsubq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double)), \
36176 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vsubq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
36177 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vsubq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36178 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vsubq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
36179 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vsubq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
36180 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vsubq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
36181 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vsubq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
36182 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vsubq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36183 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vsubq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36184 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsubq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36185 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vsubq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
36186 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vsubq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
36187 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsubq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
36188 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vsubq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
36189 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vsubq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)));})
/* Type-generic vminnmvq: scalar (fp_n) p0 with a float vector p1, dispatched
   to the f16 or f32 type-specific intrinsic.  p0 is forwarded unexpanded to
   __ARM_mve_coerce2, as in the other scalar-first reductions.  */
36191 #define __arm_vminnmvq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36192 __typeof(p1) __p1 = (p1); \
36193 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36194 int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float16x8_t]: __arm_vminnmvq_f16 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float16x8_t)), \
36195 int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float32x4_t]: __arm_vminnmvq_f32 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float32x4_t)));})
/* Type-generic vshlq_r: _Generic dispatch on p0's MVE typeid; p1 is passed
   through unexpanded to the type-specific _r intrinsics.  Integer vector
   types only (s8..s32, u8..u32).  */
36197 #define __arm_vshlq_r(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36198 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
36199 int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlq_r_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
36200 int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlq_r_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
36201 int (*)[__ARM_mve_type_int32x4_t]: __arm_vshlq_r_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
36202 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlq_r_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
36203 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlq_r_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
36204 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshlq_r_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* Type-generic vshlq_n: _Generic dispatch on p0's MVE typeid; p1 is passed
   through unexpanded (presumably an immediate that must remain a constant
   expression — confirm against the intrinsic prototypes).  */
36206 #define __arm_vshlq_n(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36207 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
36208 int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
36209 int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
36210 int (*)[__ARM_mve_type_int32x4_t]: __arm_vshlq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
36211 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
36212 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
36213 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshlq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* Type-generic vshlltq: _Generic dispatch on p0's MVE typeid; only the
   narrow integer vectors (s8/s16, u8/u16) have associations, consistent
   with a widening operation.  p1 is passed through unexpanded.  */
36215 #define __arm_vshlltq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36216 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
36217 int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlltq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
36218 int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlltq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
36219 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlltq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
36220 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlltq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1));})
/* Type-generic vshllbq: same dispatch shape as vshlltq above — narrow
   integer vectors (s8/s16, u8/u16) only, p1 passed through unexpanded.  */
36222 #define __arm_vshllbq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36223 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
36224 int (*)[__ARM_mve_type_int8x16_t]: __arm_vshllbq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
36225 int (*)[__ARM_mve_type_int16x8_t]: __arm_vshllbq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
36226 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshllbq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
36227 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshllbq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1));})
/* Type-generic vrshrq: _Generic dispatch on p0's MVE typeid to the
   type-specific _n intrinsic; p1 is passed through unexpanded.  Covers
   s8..s32 and u8..u32 vector operands.  */
36229 #define __arm_vrshrq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36230 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
36231 int (*)[__ARM_mve_type_int8x16_t]: __arm_vrshrq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
36232 int (*)[__ARM_mve_type_int16x8_t]: __arm_vrshrq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
36233 int (*)[__ARM_mve_type_int32x4_t]: __arm_vrshrq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
36234 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrshrq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
36235 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrshrq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
36236 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vrshrq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* Redundant duplicate definition of __arm_vrshrq removed: a token-identical
   definition appears immediately above.  C permits identical macro
   redefinition, but keeping two copies risks silent divergence under future
   maintenance.  */
/* Type-generic vrshlq: dispatches both scalar-shift (_n, int_n p1 via raw
   __ARM_mve_coerce3) and vector-shift forms.  Note the vector-shift count
   vector is always a signed vector (int8x16_t etc.) even for unsigned data
   vectors, per the u8/u16/u32 associations below.  */
36247 #define __arm_vrshlq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36248 __typeof(p1) __p1 = (p1); \
36249 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36250 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vrshlq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
36251 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vrshlq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36252 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vrshlq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
36253 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vrshlq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
36254 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vrshlq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
36255 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vrshlq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
36256 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrshlq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36257 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrshlq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36258 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrshlq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36259 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrshlq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36260 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrshlq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36261 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrshlq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Type-generic vrmulhq: _Generic dispatch on both operands' MVE typeids to
   the matching vector/vector intrinsic for s8..s32 and u8..u32.  */
36263 #define __arm_vrmulhq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36264 __typeof(p1) __p1 = (p1); \
36265 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36266 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrmulhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36267 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrmulhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36268 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrmulhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36269 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrmulhq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
36270 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrmulhq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
36271 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrmulhq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Type-generic vrhaddq: _Generic dispatch on both operands' MVE typeids to
   the matching vector/vector intrinsic for s8..s32 and u8..u32.  */
36273 #define __arm_vrhaddq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36274 __typeof(p1) __p1 = (p1); \
36275 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36276 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrhaddq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36277 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrhaddq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36278 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrhaddq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36279 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrhaddq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
36280 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrhaddq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
36281 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrhaddq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Type-generic vqsubq: dispatches vector/scalar (_n, int_n p1 via raw
   __ARM_mve_coerce3) and vector/vector forms for s8..s32 and u8..u32.  */
36283 #define __arm_vqsubq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36284 __typeof(p1) __p1 = (p1); \
36285 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36286 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqsubq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
36287 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqsubq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36288 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqsubq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
36289 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vqsubq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
36290 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vqsubq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
36291 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vqsubq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
36292 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqsubq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36293 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqsubq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36294 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqsubq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36295 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vqsubq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
36296 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vqsubq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
36297 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vqsubq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Type-generic vqshluq: _Generic dispatch on p0's MVE typeid; signed vector
   operands only (s8/s16/s32).  p1 is passed through unexpanded.  */
36299 #define __arm_vqshluq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36300 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
36301 int (*)[__ARM_mve_type_int8x16_t]: __arm_vqshluq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
36302 int (*)[__ARM_mve_type_int16x8_t]: __arm_vqshluq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
36303 int (*)[__ARM_mve_type_int32x4_t]: __arm_vqshluq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1));})
/* Type-generic vqshlq (vector/vector form): dispatch on both operands' MVE
   typeids.  The shift-count vector is always a signed vector, even when the
   data vector is unsigned (see the u8/u16/u32 associations).  */
36305 #define __arm_vqshlq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36306 __typeof(p1) __p1 = (p1); \
36307 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36308 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqshlq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36309 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqshlq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36310 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqshlq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36311 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqshlq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36312 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqshlq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36313 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqshlq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Type-generic vqshlq_r: _Generic dispatch on p0's MVE typeid; p1 is passed
   through unexpanded to the type-specific _r intrinsic.  */
36315 #define __arm_vqshlq_r(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36316 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
36317 int (*)[__ARM_mve_type_int8x16_t]: __arm_vqshlq_r_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
36318 int (*)[__ARM_mve_type_int16x8_t]: __arm_vqshlq_r_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
36319 int (*)[__ARM_mve_type_int32x4_t]: __arm_vqshlq_r_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
36320 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vqshlq_r_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
36321 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vqshlq_r_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
36322 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vqshlq_r_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* Type-generic vqshlq_n: _Generic dispatch on p0's MVE typeid; p1 is passed
   through unexpanded (presumably an immediate that must remain a constant
   expression — confirm against the intrinsic prototypes).  */
36324 #define __arm_vqshlq_n(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36325 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
36326 int (*)[__ARM_mve_type_int8x16_t]: __arm_vqshlq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
36327 int (*)[__ARM_mve_type_int16x8_t]: __arm_vqshlq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
36328 int (*)[__ARM_mve_type_int32x4_t]: __arm_vqshlq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
36329 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vqshlq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
36330 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vqshlq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
36331 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vqshlq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* Type-generic vqrshlq: dispatches vector/vector forms (signed shift-count
   vector even for unsigned data) and vector/scalar (_n, int_n p1 via raw
   __ARM_mve_coerce3) forms for s8..s32 and u8..u32.  */
36333 #define __arm_vqrshlq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36334 __typeof(p1) __p1 = (p1); \
36335 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36336 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrshlq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36337 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrshlq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36338 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrshlq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36339 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrshlq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36340 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrshlq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36341 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrshlq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36342 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqrshlq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
36343 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqrshlq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36344 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqrshlq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
36345 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vqrshlq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
36346 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vqrshlq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
36347 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vqrshlq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)));})
/* Type-generic vqrdmulhq: signed-only operation; dispatches vector/vector
   and vector/scalar (_n, int_n p1 via raw __ARM_mve_coerce3) forms for
   s8/s16/s32.  */
36349 #define __arm_vqrdmulhq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36350 __typeof(p1) __p1 = (p1); \
36351 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36352 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmulhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36353 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmulhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36354 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmulhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36355 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqrdmulhq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
36356 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqrdmulhq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36357 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqrdmulhq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)));})
/* Type-generic vmlaldavxq: signed-only, s16/s32 vector pairs dispatched to
   the type-specific intrinsic.  */
36359 #define __arm_vmlaldavxq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36360 __typeof(p1) __p1 = (p1); \
36361 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36362 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlaldavxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36363 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlaldavxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Type-generic vqmovuntq: p0 is the narrower unsigned destination vector and
   p1 the wider signed source vector (u8/s16 and u16/s32 pairs only).  */
36365 #define __arm_vqmovuntq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36366 __typeof(p1) __p1 = (p1); \
36367 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36368 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovuntq_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36369 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovuntq_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Type-generic vqmovntq: p0 is the narrower destination vector and p1 the
   wider same-signedness source vector (s8/s16, s16/s32, u8/u16, u16/u32).  */
36371 #define __arm_vqmovntq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36372 __typeof(p1) __p1 = (p1); \
36373 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36374 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovntq_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36375 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovntq_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36376 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqmovntq_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
36377 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqmovntq_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Type-generic vqmovnbq: same dispatch shape as vqmovntq — narrow p0 with
   wider same-signedness p1 (s8/s16, s16/s32, u8/u16, u16/u32).  */
36379 #define __arm_vqmovnbq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36380 __typeof(p1) __p1 = (p1); \
36381 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36382 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovnbq_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36383 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovnbq_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36384 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqmovnbq_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
36385 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqmovnbq_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Type-generic vqdmulltq: signed-only; dispatches vector/scalar (_n, int_n
   p1 via raw __ARM_mve_coerce3) and vector/vector forms for s16 and s32.  */
36387 #define __arm_vqdmulltq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36388 __typeof(p1) __p1 = (p1); \
36389 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36390 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmulltq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36391 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmulltq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
36392 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmulltq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36393 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmulltq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Type-generic vqmovunbq: narrower unsigned p0 with wider signed p1
   (u8/s16 and u16/s32 pairs only), matching vqmovuntq above.  */
36395 #define __arm_vqmovunbq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36396 __typeof(p1) __p1 = (p1); \
36397 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36398 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovunbq_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36399 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovunbq_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Type-generic vqdmullbq: same dispatch shape as vqdmulltq — signed-only,
   _n scalar and vector/vector forms for s16 and s32.  */
36401 #define __arm_vqdmullbq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36402 __typeof(p1) __p1 = (p1); \
36403 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36404 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmullbq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36405 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmullbq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
36406 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmullbq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36407 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmullbq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Type-generic vqdmulhq: signed-only; dispatches vector/scalar (_n, int_n
   p1 via raw __ARM_mve_coerce3) and vector/vector forms for s8/s16/s32.  */
36409 #define __arm_vqdmulhq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36410 __typeof(p1) __p1 = (p1); \
36411 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36412 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqdmulhq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
36413 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmulhq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36414 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmulhq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
36415 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmulhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36416 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmulhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36417 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmulhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Type-generic vqaddq: dispatches vector/scalar (_n, int_n p1 via raw
   __ARM_mve_coerce3) and vector/vector forms for s8..s32 and u8..u32.  */
36419 #define __arm_vqaddq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
36420 __typeof(p1) __p1 = (p1); \
36421 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36422 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqaddq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
36423 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqaddq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
36424 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqaddq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
36425 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vqaddq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
36426 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vqaddq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
36427 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vqaddq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
36428 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqaddq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
36429 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqaddq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
36430 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqaddq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
36431 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vqaddq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
36432 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vqaddq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
36433 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vqaddq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Overloaded vmulltq_poly: selects the p8/p16 polynomial variant from the
   uint8x16_t/uint16x8_t operand types.  */
#define __arm_vmulltq_poly(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulltq_poly_p8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulltq_poly_p16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)));})
/* Overloaded vmullbq_poly: selects the p8/p16 polynomial variant from the
   uint8x16_t/uint16x8_t operand types.  */
#define __arm_vmullbq_poly(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmullbq_poly_p8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmullbq_poly_p16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)));})
/* Overloaded vmulltq_int: _Generic selects the signed/unsigned, per-width
   vector implementation from the types of both operands.  */
#define __arm_vmulltq_int(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulltq_int_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulltq_int_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulltq_int_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulltq_int_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulltq_int_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulltq_int_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Overloaded vhaddq: dispatches on (p0, p1) types; scalar second operands
   (__ARM_mve_type_int_n) select the _n_ forms, vector operands the vector
   forms, across all signed/unsigned element widths.  */
#define __arm_vhaddq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vhaddq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vhaddq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vhaddq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vhaddq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vhaddq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vhaddq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhaddq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhaddq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhaddq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vhaddq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vhaddq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vhaddq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Overloaded vhcaddq_rot270: signed vector types only; _Generic selects the
   per-width implementation from both operand types.  */
#define __arm_vhcaddq_rot270(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhcaddq_rot270_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhcaddq_rot270_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhcaddq_rot270_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Overloaded vhcaddq_rot90: signed vector types only; _Generic selects the
   per-width implementation from both operand types.  */
#define __arm_vhcaddq_rot90(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhcaddq_rot90_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhcaddq_rot90_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhcaddq_rot90_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Overloaded vhsubq: dispatches on (p0, p1) types; scalar second operands
   (__ARM_mve_type_int_n) select the _n_ forms, vector operands the vector
   forms, across all signed/unsigned element widths.  */
#define __arm_vhsubq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vhsubq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vhsubq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vhsubq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vhsubq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vhsubq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vhsubq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhsubq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhsubq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhsubq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vhsubq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vhsubq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vhsubq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Overloaded vminq: _Generic selects the signed/unsigned, per-width vector
   implementation from the types of both operands.  */
#define __arm_vminq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vminq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vminq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vminq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vminq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vminq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vminq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Overloaded vminaq: first operand is unsigned, second signed of matching
   width; _Generic selects the per-width _sN implementation.  */
#define __arm_vminaq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vminaq_s8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vminaq_s16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vminaq_s32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Overloaded vmaxq: _Generic selects the signed/unsigned, per-width vector
   implementation from the types of both operands.  */
#define __arm_vmaxq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmaxq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmaxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmaxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmaxq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmaxq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmaxq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Overloaded vmaxaq: first operand is unsigned, second signed of matching
   width; _Generic selects the per-width _sN implementation.  */
#define __arm_vmaxaq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmaxaq_s8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmaxaq_s16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmaxaq_s32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Overloaded vmovntq: dispatches on (destination, source) vector types,
   where the destination lane width is half the source lane width.  */
#define __arm_vmovntq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vmovntq_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vmovntq_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vmovntq_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vmovntq_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Overloaded vmovnbq: dispatches on (destination, source) vector types,
   where the destination lane width is half the source lane width.  */
#define __arm_vmovnbq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vmovnbq_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vmovnbq_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vmovnbq_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vmovnbq_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Overloaded vmulhq: _Generic selects the signed/unsigned, per-width vector
   implementation from the types of both operands.  */
#define __arm_vmulhq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulhq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulhq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulhq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Overloaded vmullbq_int: _Generic selects the signed/unsigned, per-width
   vector implementation from the types of both operands.  */
#define __arm_vmullbq_int(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmullbq_int_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmullbq_int_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmullbq_int_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmullbq_int_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmullbq_int_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmullbq_int_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Overloaded vbicq_m_n: only p0 participates in dispatch (16/32-bit lane
   types only); p1 and p2 are forwarded unchanged.  */
#define __arm_vbicq_m_n(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vbicq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vbicq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vbicq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vbicq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2));})
/* Overloaded vqrshrnbq: dispatches on (narrow destination, wide source)
   vector types; the immediate p2 is forwarded unchanged.  */
#define __arm_vqrshrnbq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshrnbq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshrnbq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqrshrnbq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqrshrnbq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Overloaded vqrshrunbq: unsigned narrow destination with signed wide
   source; the immediate p2 is forwarded unchanged.  */
#define __arm_vqrshrunbq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshrunbq_n_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshrunbq_n_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Overloaded vshlcq: only p0 participates in dispatch; p1 and p2 are
   forwarded unchanged to the selected per-type implementation.  */
#define __arm_vshlcq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlcq_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlcq_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vshlcq_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlcq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlcq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshlcq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2));})
/* Overloaded vclsq_m: signed vector types only; dispatches on (p0, p1)
   types, forwarding p2 unchanged.  */
#define __arm_vclsq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vclsq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vclsq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vclsq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Overloaded vclzq_m: dispatches on (p0, p1) types across all
   signed/unsigned element widths, forwarding p2 unchanged.  */
#define __arm_vclzq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vclzq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vclzq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vclzq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vclzq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vclzq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vclzq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Overloaded vmaxaq_m: unsigned first operand, signed second of matching
   width; dispatches on (p0, p1) types, forwarding p2 unchanged.  */
#define __arm_vmaxaq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmaxaq_m_s8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmaxaq_m_s16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmaxaq_m_s32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Overloaded vminaq_m: unsigned first operand, signed second of matching
   width; dispatches on (p0, p1) types, forwarding p2 unchanged.  */
#define __arm_vminaq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vminaq_m_s8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vminaq_m_s16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vminaq_m_s32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Overloaded vmlaq: three-operand dispatch where the third operand is
   always a scalar (__ARM_mve_type_int_n) coerced via __ARM_mve_coerce3;
   p0/p1 select the signed/unsigned, per-width _n_ implementation.  */
#define __arm_vmlaq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vmlaq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmlaq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmlaq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vmlaq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmlaq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmlaq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int)));})
/* Overloaded vsriq: dispatches on (p0, p1) types to the per-width _n_
   implementation; the immediate p2 is forwarded unchanged.  */
#define __arm_vsriq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vsriq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vsriq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsriq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vsriq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vsriq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsriq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Overloaded vsliq: dispatches on (p0, p1) types to the per-width _n_
   implementation; the immediate p2 is forwarded unchanged.  */
#define __arm_vsliq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vsliq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vsliq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsliq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vsliq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vsliq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsliq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Overloaded vshlq_m_r: only p0 participates in dispatch; p1 and p2 are
   forwarded unchanged to the selected per-type implementation.  */
#define __arm_vshlq_m_r(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlq_m_r_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlq_m_r_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vshlq_m_r_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlq_m_r_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlq_m_r_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshlq_m_r_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2));})
/* Overloaded vrshlq_m_n: only p0 participates in dispatch; p2 is forwarded
   unchanged.  The second argument is evaluated exactly once into __p1 and
   that copy is passed to every branch.  Previously five of the six
   branches passed the raw p1, so a side-effecting argument expression was
   evaluated twice (once for the __p1 initializer, once in the selected
   branch) and the macro was internally inconsistent (only the u32 row
   used __p1).  */
#define __arm_vrshlq_m_n(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vrshlq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vrshlq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vrshlq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrshlq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrshlq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vrshlq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __p1, p2));})
/* Overloaded vqshlq_m_r: only p0 participates in dispatch; p1 and p2 are
   forwarded unchanged to the selected per-type implementation.  */
#define __arm_vqshlq_m_r(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vqshlq_m_r_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vqshlq_m_r_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vqshlq_m_r_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vqshlq_m_r_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vqshlq_m_r_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vqshlq_m_r_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2));})
/* Overloaded vqrshlq_m_n: only p0 participates in dispatch; p1 and p2 are
   forwarded unchanged to the selected per-type implementation.  */
#define __arm_vqrshlq_m_n(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vqrshlq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vqrshlq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vqrshlq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vqrshlq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vqrshlq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vqrshlq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2));})
/* Overloaded vqrdmlsdhxq: all three operands are signed vectors of the
   same width; _Generic selects the per-width implementation.  */
#define __arm_vqrdmlsdhxq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmlsdhxq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmlsdhxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmlsdhxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* Overloaded vqrdmlsdhq: all three operands are signed vectors of the
   same width; _Generic selects the per-width implementation.  */
#define __arm_vqrdmlsdhq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmlsdhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmlsdhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmlsdhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* Overloaded vqrdmlashq: signed vectors with a scalar third operand
   (__ARM_mve_type_int_n) coerced via __ARM_mve_coerce3; _Generic selects
   the per-width _n_ implementation.  */
#define __arm_vqrdmlashq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqrdmlashq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqrdmlashq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqrdmlashq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int)));})
/* vqdmlashq: _Generic dispatch to __arm_vqdmlashq_n_{s8,s16,s32}; p2 is a
   scalar (__ARM_mve_type_int_n), coerced to int via __ARM_mve_coerce3.  */
36726 #define __arm_vqdmlashq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36727 __typeof(p1) __p1 = (p1); \
36728 __typeof(p2) __p2 = (p2); \
36729 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36730 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqdmlashq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int)), \
36731 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmlashq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int)), \
36732 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmlashq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int)));})
/* vqrdmlahq: _Generic dispatch to __arm_vqrdmlahq_n_{s8,s16,s32}; p2 is a
   scalar (__ARM_mve_type_int_n), coerced to int via __ARM_mve_coerce3.  */
36734 #define __arm_vqrdmlahq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36735 __typeof(p1) __p1 = (p1); \
36736 __typeof(p2) __p2 = (p2); \
36737 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36738 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqrdmlahq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int)), \
36739 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqrdmlahq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int)), \
36740 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqrdmlahq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int)));})
/* vmlasq: _Generic dispatch to __arm_vmlasq_n_{s8,s16,s32,u8,u16,u32} — both
   signed and unsigned variants; p2 is a scalar coerced via __ARM_mve_coerce3.  */
36742 #define __arm_vmlasq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36743 __typeof(p1) __p1 = (p1); \
36744 __typeof(p2) __p2 = (p2); \
36745 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36746 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vmlasq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int)), \
36747 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmlasq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int)), \
36748 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmlasq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int)), \
36749 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vmlasq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int)), \
36750 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmlasq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int)), \
36751 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmlasq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int)));})
/* vqdmlahq: _Generic dispatch to __arm_vqdmlahq_n_{s8,s16,s32}; p2 is a
   scalar (__ARM_mve_type_int_n), coerced to int via __ARM_mve_coerce3.  */
36753 #define __arm_vqdmlahq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36754 __typeof(p1) __p1 = (p1); \
36755 __typeof(p2) __p2 = (p2); \
36756 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36757 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqdmlahq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int)), \
36758 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmlahq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int)), \
36759 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmlahq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int)));})
/* vqrdmladhxq: _Generic dispatch to __arm_vqrdmladhxq_{s8,s16,s32}, keyed on
   the __ARM_mve_typeid of the three vector operands (signed vectors only).  */
36761 #define __arm_vqrdmladhxq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36762 __typeof(p1) __p1 = (p1); \
36763 __typeof(p2) __p2 = (p2); \
36764 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36765 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmladhxq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
36766 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmladhxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
36767 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmladhxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* vqrdmladhq: _Generic dispatch to __arm_vqrdmladhq_{s8,s16,s32}, keyed on
   the __ARM_mve_typeid of the three vector operands (signed vectors only).  */
36769 #define __arm_vqrdmladhq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36770 __typeof(p1) __p1 = (p1); \
36771 __typeof(p2) __p2 = (p2); \
36772 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36773 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmladhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
36774 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmladhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
36775 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmladhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* vqnegq_m: _Generic dispatch to __arm_vqnegq_m_{s8,s16,s32} on the two
   vector operand types; p2 is passed through unmodified.  */
36777 #define __arm_vqnegq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36778 __typeof(p1) __p1 = (p1); \
36779 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36780 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqnegq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
36781 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqnegq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
36782 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqnegq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* vqdmlsdhxq: _Generic dispatch to __arm_vqdmlsdhxq_{s8,s16,s32}, keyed on
   the __ARM_mve_typeid of the three vector operands (signed vectors only).  */
36784 #define __arm_vqdmlsdhxq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36785 __typeof(p1) __p1 = (p1); \
36786 __typeof(p2) __p2 = (p2); \
36787 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36788 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmlsdhxq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
36789 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmlsdhxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
36790 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmlsdhxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* vqdmlsdhq: _Generic dispatch to __arm_vqdmlsdhq_{s8,s16,s32}, keyed on
   the __ARM_mve_typeid of the three vector operands (signed vectors only).  */
36792 #define __arm_vqdmlsdhq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36793 __typeof(p1) __p1 = (p1); \
36794 __typeof(p2) __p2 = (p2); \
36795 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36796 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmlsdhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
36797 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmlsdhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
36798 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmlsdhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* vqdmladhxq: _Generic dispatch to __arm_vqdmladhxq_{s8,s16,s32}, keyed on
   the __ARM_mve_typeid of the three vector operands (signed vectors only).  */
36800 #define __arm_vqdmladhxq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36801 __typeof(p1) __p1 = (p1); \
36802 __typeof(p2) __p2 = (p2); \
36803 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36804 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmladhxq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
36805 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmladhxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
36806 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmladhxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* vqdmladhq: _Generic dispatch to __arm_vqdmladhq_{s8,s16,s32}, keyed on
   the __ARM_mve_typeid of the three vector operands (signed vectors only).  */
36808 #define __arm_vqdmladhq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36809 __typeof(p1) __p1 = (p1); \
36810 __typeof(p2) __p2 = (p2); \
36811 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36812 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmladhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
36813 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmladhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
36814 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmladhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* vmovlbq_m: _Generic dispatch to __arm_vmovlbq_m_{s8,s16,u8,u16}; note the
   result vector (p0) has double-width lanes vs. the source (p1); p2 is
   passed through unmodified.  */
36816 #define __arm_vmovlbq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36817 __typeof(p1) __p1 = (p1); \
36818 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36819 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int8x16_t]: __arm_vmovlbq_m_s8 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
36820 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int16x8_t]: __arm_vmovlbq_m_s16 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
36821 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint8x16_t]: __arm_vmovlbq_m_u8 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
36822 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint16x8_t]: __arm_vmovlbq_m_u16 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint16x8_t), p2));})
/* vmovnbq_m: _Generic dispatch to __arm_vmovnbq_m_{s16,s32,u16,u32}; note the
   result vector (p0) has half-width lanes vs. the source (p1); p2 is passed
   through unmodified.  */
36824 #define __arm_vmovnbq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36825 __typeof(p1) __p1 = (p1); \
36826 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36827 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vmovnbq_m_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
36828 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vmovnbq_m_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
36829 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vmovnbq_m_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
36830 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vmovnbq_m_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* vmovntq_m: _Generic dispatch to __arm_vmovntq_m_{s16,s32,u16,u32}; same
   half-width result/source pairing as vmovnbq_m; p2 is passed through
   unmodified.  */
36832 #define __arm_vmovntq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36833 __typeof(p1) __p1 = (p1); \
36834 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36835 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vmovntq_m_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
36836 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vmovntq_m_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
36837 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vmovntq_m_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
36838 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vmovntq_m_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* vmovltq_m: _Generic dispatch to __arm_vmovltq_m_{s8,s16,u8,u16}; same
   double-width result/source pairing as vmovlbq_m; p2 is passed through
   unmodified.  */
36840 #define __arm_vmovltq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36841 __typeof(p1) __p1 = (p1); \
36842 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36843 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int8x16_t]: __arm_vmovltq_m_s8 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
36844 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int16x8_t]: __arm_vmovltq_m_s16 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
36845 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint8x16_t]: __arm_vmovltq_m_u8 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
36846 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint16x8_t]: __arm_vmovltq_m_u16 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint16x8_t), p2));})
/* vshrnbq: _Generic dispatch to __arm_vshrnbq_n_{s16,s32,u16,u32}; result
   (p0) lanes are half the width of the source (p1); p2 is passed through
   unmodified (the _n builtins take it as-is).  */
36848 #define __arm_vshrnbq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36849 __typeof(p1) __p1 = (p1); \
36850 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36851 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vshrnbq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
36852 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vshrnbq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
36853 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vshrnbq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
36854 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vshrnbq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* vcvtaq_m: _Generic dispatch to __arm_vcvtaq_m_{s16_f16,s32_f32,u16_f16,
   u32_f32} — float source (p1) to integer result (p0); p2 is passed through
   unmodified.  */
36856 #define __arm_vcvtaq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36857 __typeof(p1) __p1 = (p1); \
36858 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36859 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcvtaq_m_s16_f16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36860 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcvtaq_m_s32_f32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2), \
36861 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcvtaq_m_u16_f16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36862 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcvtaq_m_u32_f32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* vcvtq_m: _Generic dispatch covering both conversion directions — int/uint
   source to float result (first four cases) and float source to int/uint
   result (last four); p2 is passed through unmodified.  */
36864 #define __arm_vcvtq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36865 __typeof(p1) __p1 = (p1); \
36866 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36867 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcvtq_m_f16_s16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
36868 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcvtq_m_f32_s32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
36869 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcvtq_m_f16_u16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
36870 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcvtq_m_f32_u32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
36871 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcvtq_m_s16_f16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36872 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcvtq_m_s32_f32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2), \
36873 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcvtq_m_u16_f16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36874 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcvtq_m_u32_f32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* vcvtq_m_n: _Generic dispatch for the _n form of vcvtq_m; covers both
   conversion directions (float->int/uint and int/uint->float); p2 and p3
   are passed through unmodified.  */
36876 #define __arm_vcvtq_m_n(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
36877 __typeof(p1) __p1 = (p1); \
36878 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36879 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcvtq_m_n_s16_f16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2, p3), \
36880 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcvtq_m_n_s32_f32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2, p3), \
36881 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcvtq_m_n_u16_f16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2, p3), \
36882 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcvtq_m_n_u32_f32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2, p3), \
36883 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcvtq_m_n_f16_s16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
36884 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcvtq_m_n_f32_s32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
36885 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcvtq_m_n_f16_u16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
36886 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcvtq_m_n_f32_u32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* vabsq_m: _Generic dispatch to __arm_vabsq_m_{s8,s16,s32,f16,f32} — signed
   integer and float vector variants; p2 is passed through unmodified.  */
36888 #define __arm_vabsq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36889 __typeof(p1) __p1 = (p1); \
36890 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36891 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vabsq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
36892 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vabsq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
36893 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vabsq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
36894 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vabsq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36895 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vabsq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* vcmlaq: _Generic dispatch to __arm_vcmlaq_{f16,f32} — float vectors only.  */
36897 #define __arm_vcmlaq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36898 __typeof(p1) __p1 = (p1); \
36899 __typeof(p2) __p2 = (p2); \
36900 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36901 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmlaq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t)), \
36902 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmlaq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t)));})
/* vcmlaq_rot180: _Generic dispatch to __arm_vcmlaq_rot180_{f16,f32} — float
   vectors only.  */
36904 #define __arm_vcmlaq_rot180(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36905 __typeof(p1) __p1 = (p1); \
36906 __typeof(p2) __p2 = (p2); \
36907 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36908 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmlaq_rot180_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t)), \
36909 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmlaq_rot180_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t)));})
/* vcmlaq_rot270: _Generic dispatch to __arm_vcmlaq_rot270_{f16,f32} — float
   vectors only.  */
36911 #define __arm_vcmlaq_rot270(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36912 __typeof(p1) __p1 = (p1); \
36913 __typeof(p2) __p2 = (p2); \
36914 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36915 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmlaq_rot270_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t)), \
36916 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmlaq_rot270_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t)));})
/* vcmlaq_rot90: _Generic dispatch to __arm_vcmlaq_rot90_{f16,f32} — float
   vectors only.  */
36918 #define __arm_vcmlaq_rot90(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36919 __typeof(p1) __p1 = (p1); \
36920 __typeof(p2) __p2 = (p2); \
36921 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
36922 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmlaq_rot90_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t)), \
36923 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmlaq_rot90_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t)));})
/* vrndxq_m: _Generic dispatch to __arm_vrndxq_m_{f16,f32}; p2 is passed
   through unmodified.  */
36925 #define __arm_vrndxq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36926 __typeof(p1) __p1 = (p1); \
36927 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36928 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vrndxq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36929 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vrndxq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* vrndq_m: _Generic dispatch to __arm_vrndq_m_{f16,f32}; p2 is passed
   through unmodified.  */
36931 #define __arm_vrndq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36932 __typeof(p1) __p1 = (p1); \
36933 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36934 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vrndq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36935 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vrndq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* vrndpq_m: _Generic dispatch to __arm_vrndpq_m_{f16,f32}; p2 is passed
   through unmodified.  */
36937 #define __arm_vrndpq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36938 __typeof(p1) __p1 = (p1); \
36939 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36940 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vrndpq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36941 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vrndpq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* vcmpgtq_m: _Generic dispatch covering vector/vector and vector/scalar
   forms — integer scalars via __ARM_mve_coerce3(p1, int), float scalars via
   __ARM_mve_coerce2(p1, double); p2 is passed through unmodified.  */
36943 #define __arm_vcmpgtq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36944 __typeof(p1) __p1 = (p1); \
36945 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36946 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpgtq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
36947 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpgtq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
36948 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpgtq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
36949 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpgtq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
36950 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpgtq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
36951 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpgtq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2), \
36952 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vcmpgtq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double), p2), \
36953 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vcmpgtq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double), p2), \
36954 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmpgtq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36955 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmpgtq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* vcmpleq_m: _Generic dispatch covering vector/vector and vector/scalar
   forms — integer scalars via __ARM_mve_coerce3(p1, int), float scalars via
   __ARM_mve_coerce2(p1, double); p2 is passed through unmodified.  */
36957 #define __arm_vcmpleq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36958 __typeof(p1) __p1 = (p1); \
36959 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36960 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpleq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
36961 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpleq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
36962 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpleq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
36963 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmpleq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36964 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmpleq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2), \
36965 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpleq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
36966 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpleq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
36967 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpleq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2), \
36968 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vcmpleq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double), p2), \
36969 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vcmpleq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double), p2));})
/* vcmpltq_m: _Generic dispatch covering vector/vector and vector/scalar
   forms — integer scalars via __ARM_mve_coerce3(p1, int), float scalars via
   __ARM_mve_coerce2(p1, double); p2 is passed through unmodified.  */
36971 #define __arm_vcmpltq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36972 __typeof(p1) __p1 = (p1); \
36973 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36974 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpltq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
36975 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpltq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
36976 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpltq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
36977 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmpltq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36978 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmpltq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2), \
36979 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpltq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
36980 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpltq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
36981 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpltq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2), \
36982 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vcmpltq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double), p2), \
36983 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vcmpltq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double), p2));})
/* vcmpneq_m: _Generic dispatch covering signed, unsigned and float vectors,
   in vector/vector and vector/scalar forms — integer scalars via
   __ARM_mve_coerce3(p1, int), float scalars via __ARM_mve_coerce2(p1, double);
   p2 is passed through unmodified.  */
36985 #define __arm_vcmpneq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
36986 __typeof(p1) __p1 = (p1); \
36987 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
36988 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpneq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
36989 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpneq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
36990 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpneq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
36991 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmpneq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
36992 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmpneq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
36993 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmpneq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
36994 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmpneq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
36995 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmpneq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2), \
36996 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
36997 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
36998 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2), \
36999 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int), p2), \
37000 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int), p2), \
37001 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int), p2), \
37002 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vcmpneq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double), p2), \
37003 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vcmpneq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double), p2));})
/* Type-generic dispatcher for vcvtbq_m: selects the predicated f32<->f16
   "convert bottom" variant from the (inactive, source) vector types.  */
#define __arm_vcvtbq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float16x8_t]: __arm_vcvtbq_m_f32_f16 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float32x4_t]: __arm_vcvtbq_m_f16_f32 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vcvttq_m: selects the predicated f32<->f16
   "convert top" variant from the (inactive, source) vector types.  */
#define __arm_vcvttq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float16x8_t]: __arm_vcvttq_m_f32_f16 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float32x4_t]: __arm_vcvttq_m_f16_f32 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vcvtmq_m: picks the predicated
   float->signed/unsigned integer conversion variant from the
   (inactive, source) vector types.  */
#define __arm_vcvtmq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcvtmq_m_s16_f16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcvtmq_m_s32_f32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcvtmq_m_u16_f16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcvtmq_m_u32_f32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vcvtnq_m: picks the predicated
   float->signed/unsigned integer conversion variant from the
   (inactive, source) vector types.  */
#define __arm_vcvtnq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcvtnq_m_s16_f16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcvtnq_m_s32_f32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcvtnq_m_u16_f16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcvtnq_m_u32_f32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vcvtpq_m: picks the predicated
   float->signed/unsigned integer conversion variant from the
   (inactive, source) vector types.  */
#define __arm_vcvtpq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcvtpq_m_s16_f16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcvtpq_m_s32_f32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcvtpq_m_u16_f16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcvtpq_m_u32_f32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vdupq_m: the scalar p1 is cast to the element
   type matching the inactive vector p0, then the per-type predicated
   duplicate variant is called.  */
#define __arm_vdupq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vdupq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), (int8_t) __p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vdupq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), (int16_t) __p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vdupq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), (int32_t) __p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vdupq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), (uint8_t) __p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vdupq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), (uint16_t) __p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vdupq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), (uint32_t) __p1, p2), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vdupq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), (float16_t) __p1, p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vdupq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), (float32_t) __p1, p2));})
/* Type-generic dispatcher for vfmaq: chooses between the vector*scalar (_n)
   and vector*vector fused multiply-add variants from the three operand
   types.  Note the scalar case passes raw p2 to __ARM_mve_coerce2, which is
   the convention for the coerce2/coerce3 helpers throughout this file.  */
#define __arm_vfmaq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vfmaq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce2(p2, double)), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vfmaq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce2(p2, double)), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vfmaq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vfmaq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t)));})
/* Type-generic dispatcher for vfmsq: vector-only fused multiply-subtract,
   f16 or f32 selected from the operand types.  */
#define __arm_vfmsq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vfmsq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vfmsq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t)));})
/* Type-generic dispatcher for vfmasq: scalar-addend (_n) fused
   multiply-add variants only, f16 or f32.  __p2 is declared for typeid
   classification; the raw p2 goes to __ARM_mve_coerce2 per convention.  */
#define __arm_vfmasq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vfmasq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce2(p2, double)), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vfmasq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce2(p2, double)));})
/* Type-generic dispatcher for vmaxnmaq_m: predicated f16/f32 variant
   selected from the two vector operand types.  */
#define __arm_vmaxnmaq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vmaxnmaq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vmaxnmaq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vmaxnmavq_m: predicated f16/f32 variant
   selected from the two operand types.  */
#define __arm_vmaxnmavq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vmaxnmavq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vmaxnmavq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vmaxnmvq_m: predicated f16/f32 variant
   selected from the two operand types.  */
#define __arm_vmaxnmvq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vmaxnmvq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vmaxnmvq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vmaxnmavq_p: p0 is a floating-point scalar
   (raw p0 goes to __ARM_mve_coerce2 per convention); the f16/f32 variant is
   chosen from the vector operand p1.  */
#define __arm_vmaxnmavq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float16x8_t]: __arm_vmaxnmavq_p_f16 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float32x4_t]: __arm_vmaxnmavq_p_f32 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vmaxnmvq_p: scalar p0 plus f16/f32 vector p1,
   variant chosen from the vector type.  */
#define __arm_vmaxnmvq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float16x8_t]: __arm_vmaxnmvq_p_f16 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float32x4_t]: __arm_vmaxnmvq_p_f32 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vminnmaq_m: predicated f16/f32 variant
   selected from the two vector operand types.  */
#define __arm_vminnmaq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vminnmaq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vminnmaq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vminnmavq_p: scalar p0 plus f16/f32 vector
   p1, variant chosen from the vector type.  */
#define __arm_vminnmavq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float16x8_t]: __arm_vminnmavq_p_f16 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float32x4_t]: __arm_vminnmavq_p_f32 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vminnmvq_p: scalar p0 plus f16/f32 vector p1,
   variant chosen from the vector type.  */
#define __arm_vminnmvq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float16x8_t]: __arm_vminnmvq_p_f16 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float32x4_t]: __arm_vminnmvq_p_f32 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vrndnq_m: predicated f16/f32 round-to-nearest
   variant selected from the two vector operand types.
   Fix: the f16 branch previously passed raw p2 while the f32 branch passed
   __p2; since __p2's initializer already evaluates p2 once, the raw use made
   the predicate expression evaluate twice on the f16 path.  Both branches
   now use the single-evaluation copy __p2.  */
#define __arm_vrndnq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vrndnq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vrndnq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __p2));})
/* Type-generic dispatcher for vrndaq_m: predicated f16/f32 variant selected
   from the two vector operand types.  */
#define __arm_vrndaq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vrndaq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vrndaq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vrndmq_m: predicated f16/f32 variant selected
   from the two vector operand types.  */
#define __arm_vrndmq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vrndmq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vrndmq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vrev64q_m: predicated variant selected from
   the two vector operand types; covers all 8/16/32-bit integer lanes plus
   f16/f32.  */
#define __arm_vrev64q_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrev64q_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrev64q_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrev64q_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrev64q_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrev64q_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrev64q_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vrev64q_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vrev64q_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vrev32q_m: predicated variant selected from
   the two vector operand types; only 8/16-bit lane types apply (elements
   must be narrower than the 32-bit reversal granule).  */
#define __arm_vrev32q_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrev32q_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrev32q_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrev32q_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrev32q_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vrev32q_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2));})
/* Type-generic dispatcher for vpselq (predicated select): variant selected
   from the two vector operand types; covers all integer lane sizes
   including 64-bit, plus f16/f32.  */
#define __arm_vpselq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vpselq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vpselq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vpselq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_int64x2_t][__ARM_mve_type_int64x2_t]: __arm_vpselq_s64 (__ARM_mve_coerce(__p0, int64x2_t), __ARM_mve_coerce(__p1, int64x2_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vpselq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vpselq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vpselq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_uint64x2_t][__ARM_mve_type_uint64x2_t]: __arm_vpselq_u64 (__ARM_mve_coerce(__p0, uint64x2_t), __ARM_mve_coerce(__p1, uint64x2_t), p2), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vpselq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vpselq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vcmpgeq (compare >=): vector/vector and
   vector/scalar (_n) forms for signed integer and float types; the scalar
   p1 goes raw to __ARM_mve_coerce2/coerce3 per convention.  Unsigned
   vectors are intentionally absent (>= on unsigned maps to vcmpcsq).  */
#define __arm_vcmpgeq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpgeq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpgeq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpgeq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpgeq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpgeq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpgeq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmpgeq_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t)), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmpgeq_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t)), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vcmpgeq_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double)), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vcmpgeq_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double)));})
/* Type-generic dispatcher for vrshrnbq: variant chosen from the
   (narrow destination, wide source) vector type pair; p2 is the immediate
   shift amount, passed through unchanged.  */
#define __arm_vrshrnbq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vrshrnbq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vrshrnbq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vrshrnbq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vrshrnbq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Type-generic dispatcher for vrev16q_m: predicated variant selected from
   the two vector operand types; only 8-bit lane types apply.  */
#define __arm_vrev16q_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrev16q_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrev16q_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2));})
/* Type-generic dispatcher for vqshruntq: unsigned narrow destination with
   signed wide source; variant chosen from that type pair, p2 is the
   immediate shift amount.  */
#define __arm_vqshruntq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqshruntq_n_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqshruntq_n_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Type-generic dispatcher for vqshrnbq: variant chosen from the
   (narrow destination, wide source) vector type pair; p2 is the immediate
   shift amount.  */
#define __arm_vqshrnbq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqshrnbq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqshrnbq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqshrnbq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqshrnbq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Type-generic dispatcher for vqshrntq: variant chosen from the
   (narrow destination, wide source) vector type pair; p2 is the immediate
   shift amount.  */
#define __arm_vqshrntq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqshrntq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqshrntq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqshrntq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqshrntq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Type-generic dispatcher for vqrshruntq: unsigned narrow destination with
   signed wide source; variant chosen from that type pair, p2 is the
   immediate shift amount.  */
#define __arm_vqrshruntq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshruntq_n_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshruntq_n_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Type-generic dispatcher for vqmovnbq_m: predicated saturating narrow
   (bottom); variant chosen from the (narrow destination, wide source)
   vector type pair.  */
#define __arm_vqmovnbq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovnbq_m_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovnbq_m_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqmovnbq_m_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqmovnbq_m_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Type-generic dispatcher for vqmovntq_m: predicated saturating narrow
   (top); variant chosen from the (narrow destination, wide source) vector
   type pair.  */
#define __arm_vqmovntq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovntq_m_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovntq_m_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqmovntq_m_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqmovntq_m_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Type-generic dispatcher for vqmovunbq_m: predicated saturating narrow of
   a signed wide source into an unsigned narrow destination (bottom).  */
#define __arm_vqmovunbq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovunbq_m_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovunbq_m_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Type-generic dispatcher for vqmovuntq_m: predicated saturating narrow of
   a signed wide source into an unsigned narrow destination (top).  */
#define __arm_vqmovuntq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovuntq_m_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovuntq_m_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Type-generic dispatcher for vqrshrntq: variant chosen from the
   (narrow destination, wide source) vector type pair; p2 is the immediate
   shift amount.  */
#define __arm_vqrshrntq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshrntq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshrntq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqrshrntq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqrshrntq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* NOTE(review): this is a byte-identical redefinition of __arm_vqrshruntq,
   which already appears earlier in this file.  Redefining a macro with an
   identical replacement list is permitted by the C standard, so this is
   benign, but the duplicate should be removed upstream.  */
#define __arm_vqrshruntq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshruntq_n_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshruntq_n_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Type-generic dispatcher for vnegq_m: predicated negate; signed integer
   and float vector types only (negation of unsigned lanes is not
   provided).  */
#define __arm_vnegq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vnegq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vnegq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vnegq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vnegq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vnegq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vcmpgeq_m (predicated compare >=):
   vector/vector and vector/scalar (_n) forms for signed integer and float
   types; the scalar p1 goes raw to __ARM_mve_coerce2/coerce3 per
   convention.  */
#define __arm_vcmpgeq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpgeq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpgeq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpgeq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpgeq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpgeq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpgeq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vcmpgeq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce2(p1, double), p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vcmpgeq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce2(p1, double), p2), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmpgeq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmpgeq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Type-generic dispatcher for vabdq_m (predicated absolute difference):
   three-operand form (inactive, a, b) plus predicate p3; variant chosen
   from the three vector operand types across all 8/16/32-bit integer and
   f16/f32 lane types.  */
#define __arm_vabdq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vabdq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vabdq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vabdq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vabdq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vabdq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vabdq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vabdq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vabdq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic dispatch for vaddq_m.  A pointer-to-array type built from
   the __ARM_mve_typeid of each argument drives a _Generic selection:
   vector third operands pick __arm_vaddq_m_<t>, scalar third operands
   (__ARM_mve_type_int_n / __ARM_mve_type_fp_n) pick the _n forms.
   p3 is forwarded unchanged (the predicate, per the _m naming
   convention of this file).  NOTE(review): the scalar coercions
   __ARM_mve_coerce2/__ARM_mve_coerce3 are intentionally applied to the
   raw macro parameter p2 rather than the __p2 temporary, matching the
   other _n dispatchers in this file.  */
#define __arm_vaddq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vaddq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vaddq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vaddq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vaddq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vaddq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vaddq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vaddq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vaddq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3), \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vaddq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vaddq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vaddq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vaddq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vaddq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vaddq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vaddq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce2(p2, double), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vaddq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce2(p2, double), p3));})
/* Polymorphic dispatch for vandq_m: _Generic selects the type-suffixed
   intrinsic from the __ARM_mve_typeid of the three arguments, coercing
   each argument to the selected intrinsic's parameter type; the
   predicate p3 passes through unchanged.  */
#define __arm_vandq_m(p0,p1,p2,p3) ({ __typeof(p0) __arg0 = (p0); \
  __typeof(p1) __arg1 = (p1); \
  __typeof(p2) __arg2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__arg0)][__ARM_mve_typeid(__arg1)][__ARM_mve_typeid(__arg2)])0, \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vandq_m_f32 (__ARM_mve_coerce(__arg0, float32x4_t), __ARM_mve_coerce(__arg1, float32x4_t), __ARM_mve_coerce(__arg2, float32x4_t), p3), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vandq_m_f16 (__ARM_mve_coerce(__arg0, float16x8_t), __ARM_mve_coerce(__arg1, float16x8_t), __ARM_mve_coerce(__arg2, float16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vandq_m_u32 (__ARM_mve_coerce(__arg0, uint32x4_t), __ARM_mve_coerce(__arg1, uint32x4_t), __ARM_mve_coerce(__arg2, uint32x4_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vandq_m_u16 (__ARM_mve_coerce(__arg0, uint16x8_t), __ARM_mve_coerce(__arg1, uint16x8_t), __ARM_mve_coerce(__arg2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vandq_m_u8 (__ARM_mve_coerce(__arg0, uint8x16_t), __ARM_mve_coerce(__arg1, uint8x16_t), __ARM_mve_coerce(__arg2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vandq_m_s32 (__ARM_mve_coerce(__arg0, int32x4_t), __ARM_mve_coerce(__arg1, int32x4_t), __ARM_mve_coerce(__arg2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vandq_m_s16 (__ARM_mve_coerce(__arg0, int16x8_t), __ARM_mve_coerce(__arg1, int16x8_t), __ARM_mve_coerce(__arg2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vandq_m_s8 (__ARM_mve_coerce(__arg0, int8x16_t), __ARM_mve_coerce(__arg1, int8x16_t), __ARM_mve_coerce(__arg2, int8x16_t), p3));})
/* Polymorphic dispatch for vbicq_m: _Generic on the __ARM_mve_typeid of
   the three arguments selects the matching type-suffixed intrinsic
   (s8/s16/s32, u8/u16/u32, f16/f32); p3 is forwarded unchanged.  */
#define __arm_vbicq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vbicq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vbicq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vbicq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vbicq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vbicq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vbicq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vbicq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vbicq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic dispatch for vbrsrq_m: selects the _n intrinsic matching
   the type-ids of the first two (vector) arguments; the scalar p2 and
   p3 are forwarded unchanged, so only p0/p1 participate in overload
   resolution.  */
#define __arm_vbrsrq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vbrsrq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vbrsrq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vbrsrq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vbrsrq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vbrsrq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vbrsrq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3), \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vbrsrq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), p2, p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vbrsrq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), p2, p3));})
/* Polymorphic dispatch for vcaddq_rot270_m: _Generic on the three
   argument type-ids selects the matching type-suffixed intrinsic;
   p3 is forwarded unchanged.  */
#define __arm_vcaddq_rot270_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcaddq_rot270_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcaddq_rot270_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcaddq_rot270_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcaddq_rot270_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcaddq_rot270_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcaddq_rot270_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcaddq_rot270_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcaddq_rot270_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic dispatch for vcaddq_rot90_m: identical dispatch scheme
   to vcaddq_rot270_m, selecting the rot90 type-suffixed intrinsics.  */
#define __arm_vcaddq_rot90_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcaddq_rot90_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcaddq_rot90_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcaddq_rot90_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcaddq_rot90_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcaddq_rot90_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcaddq_rot90_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcaddq_rot90_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcaddq_rot90_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic dispatch for vcmlaq_m: floating-point only — the three
   argument type-ids select either the f16 or the f32 intrinsic.  */
#define __arm_vcmlaq_m(p0,p1,p2,p3) ({ __typeof(p0) __arg0 = (p0); \
  __typeof(p1) __arg1 = (p1); \
  __typeof(p2) __arg2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__arg0)][__ARM_mve_typeid(__arg1)][__ARM_mve_typeid(__arg2)])0, \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmlaq_m_f32 (__ARM_mve_coerce(__arg0, float32x4_t), __ARM_mve_coerce(__arg1, float32x4_t), __ARM_mve_coerce(__arg2, float32x4_t), p3), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmlaq_m_f16 (__ARM_mve_coerce(__arg0, float16x8_t), __ARM_mve_coerce(__arg1, float16x8_t), __ARM_mve_coerce(__arg2, float16x8_t), p3));})
/* Polymorphic dispatch for vcmlaq_rot180_m: floating-point only —
   selects the f16 or f32 intrinsic from the argument type-ids.  */
#define __arm_vcmlaq_rot180_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmlaq_rot180_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmlaq_rot180_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic dispatch for vcmlaq_rot270_m: floating-point only —
   selects the f16 or f32 intrinsic from the argument type-ids.  */
#define __arm_vcmlaq_rot270_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmlaq_rot270_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmlaq_rot270_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic dispatch for vcmlaq_rot90_m: floating-point only — the
   three argument type-ids select either the f16 or the f32 intrinsic.  */
#define __arm_vcmlaq_rot90_m(p0,p1,p2,p3) ({ __typeof(p0) __arg0 = (p0); \
  __typeof(p1) __arg1 = (p1); \
  __typeof(p2) __arg2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__arg0)][__ARM_mve_typeid(__arg1)][__ARM_mve_typeid(__arg2)])0, \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmlaq_rot90_m_f32 (__ARM_mve_coerce(__arg0, float32x4_t), __ARM_mve_coerce(__arg1, float32x4_t), __ARM_mve_coerce(__arg2, float32x4_t), p3), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmlaq_rot90_m_f16 (__ARM_mve_coerce(__arg0, float16x8_t), __ARM_mve_coerce(__arg1, float16x8_t), __ARM_mve_coerce(__arg2, float16x8_t), p3));})
/* Polymorphic dispatch for vcmulq_m: floating-point only — selects
   the f16 or f32 intrinsic from the argument type-ids.  */
#define __arm_vcmulq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmulq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmulq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic dispatch for vcmulq_rot180_m: floating-point only —
   selects the f16 or f32 intrinsic from the argument type-ids.  */
#define __arm_vcmulq_rot180_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmulq_rot180_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmulq_rot180_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic dispatch for vcmulq_rot270_m: floating-point only —
   selects the f16 or f32 intrinsic from the argument type-ids.  */
#define __arm_vcmulq_rot270_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmulq_rot270_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmulq_rot270_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic dispatch for vcmulq_rot90_m: floating-point only —
   selects the f16 or f32 intrinsic from the argument type-ids.
   Formatting normalized to match the sibling dispatch macros: no space
   between the typeid array dimensions, and a space before the argument
   list of the selected intrinsic (whitespace in a macro replacement
   list is semantically neutral).  */
#define __arm_vcmulq_rot90_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmulq_rot90_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmulq_rot90_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic dispatch for veorq_m: _Generic selects the type-suffixed
   intrinsic from the __ARM_mve_typeid of the three arguments, coercing
   each argument to the selected intrinsic's parameter type; the
   predicate p3 passes through unchanged.  */
#define __arm_veorq_m(p0,p1,p2,p3) ({ __typeof(p0) __arg0 = (p0); \
  __typeof(p1) __arg1 = (p1); \
  __typeof(p2) __arg2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__arg0)][__ARM_mve_typeid(__arg1)][__ARM_mve_typeid(__arg2)])0, \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_veorq_m_f32 (__ARM_mve_coerce(__arg0, float32x4_t), __ARM_mve_coerce(__arg1, float32x4_t), __ARM_mve_coerce(__arg2, float32x4_t), p3), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_veorq_m_f16 (__ARM_mve_coerce(__arg0, float16x8_t), __ARM_mve_coerce(__arg1, float16x8_t), __ARM_mve_coerce(__arg2, float16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_veorq_m_u32 (__ARM_mve_coerce(__arg0, uint32x4_t), __ARM_mve_coerce(__arg1, uint32x4_t), __ARM_mve_coerce(__arg2, uint32x4_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_veorq_m_u16 (__ARM_mve_coerce(__arg0, uint16x8_t), __ARM_mve_coerce(__arg1, uint16x8_t), __ARM_mve_coerce(__arg2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_veorq_m_u8 (__ARM_mve_coerce(__arg0, uint8x16_t), __ARM_mve_coerce(__arg1, uint8x16_t), __ARM_mve_coerce(__arg2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_veorq_m_s32 (__ARM_mve_coerce(__arg0, int32x4_t), __ARM_mve_coerce(__arg1, int32x4_t), __ARM_mve_coerce(__arg2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_veorq_m_s16 (__ARM_mve_coerce(__arg0, int16x8_t), __ARM_mve_coerce(__arg1, int16x8_t), __ARM_mve_coerce(__arg2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_veorq_m_s8 (__ARM_mve_coerce(__arg0, int8x16_t), __ARM_mve_coerce(__arg1, int8x16_t), __ARM_mve_coerce(__arg2, int8x16_t), p3));})
/* Polymorphic dispatch for vfmaq_m: floating-point only.  Vector third
   operands pick __arm_vfmaq_m_f16/_f32; scalar third operands
   (__ARM_mve_type_fp_n) pick the _n forms.  NOTE(review): the scalar
   coercion __ARM_mve_coerce2 is applied to the raw macro parameter p2,
   not the __p2 temporary, matching the other _n dispatchers here.  */
#define __arm_vfmaq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vfmaq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vfmaq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3), \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vfmaq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce2(p2, double), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vfmaq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce2(p2, double), p3));})
/* Polymorphic dispatch for vfmasq_m: floating-point, scalar third
   operand only (__ARM_mve_type_fp_n) — selects the _n_f16 or _n_f32
   intrinsic; the scalar coercion takes the raw parameter p2.  */
#define __arm_vfmasq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vfmasq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce2(p2, double), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vfmasq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce2(p2, double), p3));})
/* Polymorphic dispatch for vfmsq_m: floating-point only — selects the
   f16 or f32 intrinsic from the argument type-ids.  */
#define __arm_vfmsq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vfmsq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vfmsq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic dispatch for vmaxnmq_m: floating-point only — selects
   the f16 or f32 intrinsic from the argument type-ids.  */
#define __arm_vmaxnmq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vmaxnmq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vmaxnmq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic dispatch for vminnmq_m: floating-point only — the three
   argument type-ids select either the f16 or the f32 intrinsic.  */
#define __arm_vminnmq_m(p0,p1,p2,p3) ({ __typeof(p0) __arg0 = (p0); \
  __typeof(p1) __arg1 = (p1); \
  __typeof(p2) __arg2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__arg0)][__ARM_mve_typeid(__arg1)][__ARM_mve_typeid(__arg2)])0, \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vminnmq_m_f32 (__ARM_mve_coerce(__arg0, float32x4_t), __ARM_mve_coerce(__arg1, float32x4_t), __ARM_mve_coerce(__arg2, float32x4_t), p3), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vminnmq_m_f16 (__ARM_mve_coerce(__arg0, float16x8_t), __ARM_mve_coerce(__arg1, float16x8_t), __ARM_mve_coerce(__arg2, float16x8_t), p3));})
/* Polymorphic dispatch for vmulq_m.  Vector third operands pick
   __arm_vmulq_m_<t>; scalar third operands (__ARM_mve_type_int_n /
   __ARM_mve_type_fp_n) pick the _n forms.  NOTE(review): the scalar
   coercions __ARM_mve_coerce2/__ARM_mve_coerce3 take the raw macro
   parameter p2 rather than the __p2 temporary, matching the other _n
   dispatchers in this file.  */
#define __arm_vmulq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vmulq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vmulq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3), \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vmulq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmulq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmulq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vmulq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmulq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmulq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vmulq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce2(p2, double), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vmulq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce2(p2, double), p3));})
/* Polymorphic dispatch for vornq_m: _Generic on the __ARM_mve_typeid
   of the three arguments selects the matching type-suffixed intrinsic
   (s8/s16/s32, u8/u16/u32, f16/f32); p3 is forwarded unchanged.  */
#define __arm_vornq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vornq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vornq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vornq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vornq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vornq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vornq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vornq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vornq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic form of the predicated vsubq_m intrinsic.  Vector/vector
   arms dispatch on all three operand types; vector/scalar (_n) arms match
   __ARM_mve_type_int_n / __ARM_mve_type_fp_n for p2.  Note the _n arms
   coerce the ORIGINAL argument p2 (not the __p2 copy) via
   __ARM_mve_coerce3/__ARM_mve_coerce2 — presumably those helpers must see
   the unexpanded argument expression; confirm before changing.  */
#define __arm_vsubq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vsubq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vsubq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsubq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vsubq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vsubq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsubq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vsubq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vsubq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vsubq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vsubq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vsubq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vsubq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vsubq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vsubq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vsubq_m_n_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce2(p2, double), p3), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vsubq_m_n_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce2(p2, double), p3));})
/* Polymorphic form of the predicated vorrq_m intrinsic: dispatches on the
   __ARM_mve_typeid triple of the copied operands to the type-suffixed
   implementation; p3 is the predicate and is passed through unchanged.  */
#define __arm_vorrq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vorrq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vorrq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vorrq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vorrq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vorrq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vorrq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
  int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vorrq_m_f16 (__ARM_mve_coerce(__p0, float16x8_t), __ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
  int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vorrq_m_f32 (__ARM_mve_coerce(__p0, float32x4_t), __ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic vld1q: dispatches on the pointer type of p0 (via
   __ARM_mve_typeid inside an unevaluated _Generic control expression) to
   the matching contiguous-load intrinsic.  Pointer arguments go through
   __ARM_mve_coerce1.  */
#define __arm_vld1q(p0) (\
_Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int8_t_ptr]: __arm_vld1q_s8 (__ARM_mve_coerce1(p0, int8_t *)), \
  int (*)[__ARM_mve_type_int16_t_ptr]: __arm_vld1q_s16 (__ARM_mve_coerce1(p0, int16_t *)), \
  int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vld1q_s32 (__ARM_mve_coerce1(p0, int32_t *)), \
  int (*)[__ARM_mve_type_uint8_t_ptr]: __arm_vld1q_u8 (__ARM_mve_coerce1(p0, uint8_t *)), \
  int (*)[__ARM_mve_type_uint16_t_ptr]: __arm_vld1q_u16 (__ARM_mve_coerce1(p0, uint16_t *)), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vld1q_u32 (__ARM_mve_coerce1(p0, uint32_t *)), \
  int (*)[__ARM_mve_type_float16_t_ptr]: __arm_vld1q_f16 (__ARM_mve_coerce1(p0, float16_t *)), \
  int (*)[__ARM_mve_type_float32_t_ptr]: __arm_vld1q_f32 (__ARM_mve_coerce1(p0, float32_t *))))
/* Polymorphic predicated (zeroing) vld1q_z: dispatches on the pointer type
   of p0; p1 is the predicate, forwarded unchanged to every arm.  */
#define __arm_vld1q_z(p0,p1) ( \
_Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int8_t_ptr]: __arm_vld1q_z_s8 (__ARM_mve_coerce1(p0, int8_t *), p1), \
  int (*)[__ARM_mve_type_int16_t_ptr]: __arm_vld1q_z_s16 (__ARM_mve_coerce1(p0, int16_t *), p1), \
  int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vld1q_z_s32 (__ARM_mve_coerce1(p0, int32_t *), p1), \
  int (*)[__ARM_mve_type_uint8_t_ptr]: __arm_vld1q_z_u8 (__ARM_mve_coerce1(p0, uint8_t *), p1), \
  int (*)[__ARM_mve_type_uint16_t_ptr]: __arm_vld1q_z_u16 (__ARM_mve_coerce1(p0, uint16_t *), p1), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vld1q_z_u32 (__ARM_mve_coerce1(p0, uint32_t *), p1), \
  int (*)[__ARM_mve_type_float16_t_ptr]: __arm_vld1q_z_f16 (__ARM_mve_coerce1(p0, float16_t *), p1), \
  int (*)[__ARM_mve_type_float32_t_ptr]: __arm_vld1q_z_f32 (__ARM_mve_coerce1(p0, float32_t *), p1)))
/* Polymorphic vld2q (two-vector deinterleaving load): dispatches on the
   pointer type of p0 to the matching type-suffixed implementation.  */
#define __arm_vld2q(p0) ( \
_Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int8_t_ptr]: __arm_vld2q_s8 (__ARM_mve_coerce1(p0, int8_t *)), \
  int (*)[__ARM_mve_type_int16_t_ptr]: __arm_vld2q_s16 (__ARM_mve_coerce1(p0, int16_t *)), \
  int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vld2q_s32 (__ARM_mve_coerce1(p0, int32_t *)), \
  int (*)[__ARM_mve_type_uint8_t_ptr]: __arm_vld2q_u8 (__ARM_mve_coerce1(p0, uint8_t *)), \
  int (*)[__ARM_mve_type_uint16_t_ptr]: __arm_vld2q_u16 (__ARM_mve_coerce1(p0, uint16_t *)), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vld2q_u32 (__ARM_mve_coerce1(p0, uint32_t *)), \
  int (*)[__ARM_mve_type_float16_t_ptr]: __arm_vld2q_f16 (__ARM_mve_coerce1(p0, float16_t *)), \
  int (*)[__ARM_mve_type_float32_t_ptr]: __arm_vld2q_f32 (__ARM_mve_coerce1(p0, float32_t *))))
/* Polymorphic vld4q (four-vector deinterleaving load): dispatches on the
   pointer type of p0 to the matching type-suffixed implementation.  */
#define __arm_vld4q(p0) ( \
_Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int8_t_ptr]: __arm_vld4q_s8 (__ARM_mve_coerce1(p0, int8_t *)), \
  int (*)[__ARM_mve_type_int16_t_ptr]: __arm_vld4q_s16 (__ARM_mve_coerce1(p0, int16_t *)), \
  int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vld4q_s32 (__ARM_mve_coerce1(p0, int32_t *)), \
  int (*)[__ARM_mve_type_uint8_t_ptr]: __arm_vld4q_u8 (__ARM_mve_coerce1(p0, uint8_t *)), \
  int (*)[__ARM_mve_type_uint16_t_ptr]: __arm_vld4q_u16 (__ARM_mve_coerce1(p0, uint16_t *)), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vld4q_u32 (__ARM_mve_coerce1(p0, uint32_t *)), \
  int (*)[__ARM_mve_type_float16_t_ptr]: __arm_vld4q_f16 (__ARM_mve_coerce1(p0, float16_t *)), \
  int (*)[__ARM_mve_type_float32_t_ptr]: __arm_vld4q_f32 (__ARM_mve_coerce1(p0, float32_t *))))
/* Polymorphic vldrhq_gather_offset: dispatches on the (base pointer,
   offset vector) type pair.  Only the offset vector p1 is copied; the base
   pointer p0 is used directly through __ARM_mve_coerce1.  */
#define __arm_vldrhq_gather_offset(p0,p1) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_offset_s16 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_offset_s32 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_offset_u16 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_offset_u32 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t)), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_offset_f16 (__ARM_mve_coerce1(p0, float16_t *), __ARM_mve_coerce(__p1, uint16x8_t)));})
/* Predicated variant of the polymorphic vldrhq_gather_offset dispatch;
   p2 is the predicate, forwarded unchanged to every arm.  */
#define __arm_vldrhq_gather_offset_z(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_offset_z_s16 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_offset_z_s32 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_offset_z_u16 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_offset_z_u32 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_offset_z_f16 (__ARM_mve_coerce1(p0, float16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2));})
/* Polymorphic vldrhq_gather_shifted_offset: same dispatch scheme as
   __arm_vldrhq_gather_offset, selecting the shifted-offset variants.  */
#define __arm_vldrhq_gather_shifted_offset(p0,p1) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_shifted_offset_s16 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_shifted_offset_s32 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_shifted_offset_u16 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_shifted_offset_u32 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t)), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_shifted_offset_f16 (__ARM_mve_coerce1(p0, float16_t *), __ARM_mve_coerce(__p1, uint16x8_t)));})
/* Predicated variant of the polymorphic vldrhq_gather_shifted_offset
   dispatch; p2 is the predicate, forwarded unchanged.  */
#define __arm_vldrhq_gather_shifted_offset_z(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_shifted_offset_z_s16 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_shifted_offset_z_s32 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_shifted_offset_z_u16 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_shifted_offset_z_u32 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_shifted_offset_z_f16 (__ARM_mve_coerce1(p0, float16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2));})
/* Polymorphic vldrwq_gather_offset: only the base pointer type varies
   (the offset vector p1 is always uint32x4_t), so dispatch is on p0 alone
   and p1 is forwarded unchanged.  */
#define __arm_vldrwq_gather_offset(p0,p1) ( \
_Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vldrwq_gather_offset_s32 (__ARM_mve_coerce1(p0, int32_t *), p1), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vldrwq_gather_offset_u32 (__ARM_mve_coerce1(p0, uint32_t *), p1), \
  int (*)[__ARM_mve_type_float32_t_ptr]: __arm_vldrwq_gather_offset_f32 (__ARM_mve_coerce1(p0, float32_t *), p1)))
/* Predicated variant of the polymorphic vldrwq_gather_offset dispatch;
   p1 (offsets) and p2 (predicate) are forwarded unchanged.  */
#define __arm_vldrwq_gather_offset_z(p0,p1,p2) ( \
_Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vldrwq_gather_offset_z_s32 (__ARM_mve_coerce1(p0, int32_t *), p1, p2), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vldrwq_gather_offset_z_u32 (__ARM_mve_coerce1(p0, uint32_t *), p1, p2), \
  int (*)[__ARM_mve_type_float32_t_ptr]: __arm_vldrwq_gather_offset_z_f32 (__ARM_mve_coerce1(p0, float32_t *), p1, p2)))
/* Polymorphic vldrwq_gather_shifted_offset: dispatch on the base pointer
   type of p0; the offset vector p1 is forwarded unchanged.  */
#define __arm_vldrwq_gather_shifted_offset(p0,p1) ( \
_Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vldrwq_gather_shifted_offset_s32 (__ARM_mve_coerce1(p0, int32_t *), p1), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vldrwq_gather_shifted_offset_u32 (__ARM_mve_coerce1(p0, uint32_t *), p1), \
  int (*)[__ARM_mve_type_float32_t_ptr]: __arm_vldrwq_gather_shifted_offset_f32 (__ARM_mve_coerce1(p0, float32_t *), p1)))
/* Predicated variant of the polymorphic vldrwq_gather_shifted_offset
   dispatch; p1 (offsets) and p2 (predicate) are forwarded unchanged.  */
#define __arm_vldrwq_gather_shifted_offset_z(p0,p1,p2) ( \
_Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vldrwq_gather_shifted_offset_z_s32 (__ARM_mve_coerce1(p0, int32_t *), p1, p2), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vldrwq_gather_shifted_offset_z_u32 (__ARM_mve_coerce1(p0, uint32_t *), p1, p2), \
  int (*)[__ARM_mve_type_float32_t_ptr]: __arm_vldrwq_gather_shifted_offset_z_f32 (__ARM_mve_coerce1(p0, float32_t *), p1, p2)))
/* Polymorphic predicated vst1q_p: dispatches on the (destination pointer,
   value vector) type pair; p2 is the predicate.
   NOTE(review): the store macros coerce the pointer with __ARM_mve_coerce,
   whereas the load macros above use __ARM_mve_coerce1 — confirm the
   asymmetry is intentional.  */
#define __arm_vst1q_p(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int8x16_t]: __arm_vst1q_p_s8 (__ARM_mve_coerce(p0, int8_t *), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int16x8_t]: __arm_vst1q_p_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vst1q_p_s32 (__ARM_mve_coerce(p0, int32_t *), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16_t]: __arm_vst1q_p_u8 (__ARM_mve_coerce(p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vst1q_p_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vst1q_p_u32 (__ARM_mve_coerce(p0, uint32_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_float16x8_t]: __arm_vst1q_p_f16 (__ARM_mve_coerce(p0, float16_t *), __ARM_mve_coerce(__p1, float16x8_t), p2), \
  int (*)[__ARM_mve_type_float32_t_ptr][__ARM_mve_type_float32x4_t]: __arm_vst1q_p_f32 (__ARM_mve_coerce(p0, float32_t *), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Polymorphic vst2q (two-vector interleaving store): dispatches on the
   (destination pointer, x2 vector tuple) type pair.  */
#define __arm_vst2q(p0,p1) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int8x16x2_t]: __arm_vst2q_s8 (__ARM_mve_coerce(p0, int8_t *), __ARM_mve_coerce(__p1, int8x16x2_t)), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int16x8x2_t]: __arm_vst2q_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int16x8x2_t)), \
  int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4x2_t]: __arm_vst2q_s32 (__ARM_mve_coerce(p0, int32_t *), __ARM_mve_coerce(__p1, int32x4x2_t)), \
  int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16x2_t]: __arm_vst2q_u8 (__ARM_mve_coerce(p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16x2_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8x2_t]: __arm_vst2q_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8x2_t)), \
  int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4x2_t]: __arm_vst2q_u32 (__ARM_mve_coerce(p0, uint32_t *), __ARM_mve_coerce(__p1, uint32x4x2_t)), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_float16x8x2_t]: __arm_vst2q_f16 (__ARM_mve_coerce(p0, float16_t *), __ARM_mve_coerce(__p1, float16x8x2_t)), \
  int (*)[__ARM_mve_type_float32_t_ptr][__ARM_mve_type_float32x4x2_t]: __arm_vst2q_f32 (__ARM_mve_coerce(p0, float32_t *), __ARM_mve_coerce(__p1, float32x4x2_t)));})
/* Polymorphic vst1q (contiguous store): dispatches on the (destination
   pointer, value vector) type pair to the type-suffixed implementation.  */
#define __arm_vst1q(p0,p1) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int8x16_t]: __arm_vst1q_s8 (__ARM_mve_coerce(p0, int8_t *), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int16x8_t]: __arm_vst1q_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vst1q_s32 (__ARM_mve_coerce(p0, int32_t *), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16_t]: __arm_vst1q_u8 (__ARM_mve_coerce(p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vst1q_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vst1q_u32 (__ARM_mve_coerce(p0, uint32_t *), __ARM_mve_coerce(__p1, uint32x4_t)), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_float16x8_t]: __arm_vst1q_f16 (__ARM_mve_coerce(p0, float16_t *), __ARM_mve_coerce(__p1, float16x8_t)), \
  int (*)[__ARM_mve_type_float32_t_ptr][__ARM_mve_type_float32x4_t]: __arm_vst1q_f32 (__ARM_mve_coerce(p0, float32_t *), __ARM_mve_coerce(__p1, float32x4_t)));})
/* Polymorphic vstrhq (halfword store): a 16-bit element pointer may pair
   with either a 128-bit x8 vector or a widened x4 vector, hence the two
   arms per pointer type.  */
#define __arm_vstrhq(p0,p1) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int16x8_t]: __arm_vstrhq_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrhq_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t)), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_float16x8_t]: __arm_vstrhq_f16 (__ARM_mve_coerce(p0, float16_t *), __ARM_mve_coerce(__p1, float16x8_t)));})
/* Predicated variant of the polymorphic vstrhq dispatch; p2 is the
   predicate, forwarded unchanged to every arm.  */
#define __arm_vstrhq_p(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int16x8_t]: __arm_vstrhq_p_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrhq_p_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_p_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_p_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_float16x8_t]: __arm_vstrhq_p_f16 (__ARM_mve_coerce(p0, float16_t *), __ARM_mve_coerce(__p1, float16x8_t), p2));})
/* Polymorphic predicated vstrhq_scatter_offset_p: dispatches on the
   (base pointer, offset vector, value vector) type triple; p3 is the
   predicate.  */
#define __arm_vstrhq_scatter_offset_p(p0,p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_offset_p_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_offset_p_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_offset_p_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_offset_p_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_float16x8_t]: __arm_vstrhq_scatter_offset_p_f16 (__ARM_mve_coerce(p0, float16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3));})
/* Polymorphic vstrhq_scatter_offset: dispatches on the (base pointer,
   offset vector, value vector) type triple to the type-suffixed
   implementation.  */
#define __arm_vstrhq_scatter_offset(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_offset_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_offset_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_offset_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_offset_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_float16x8_t]: __arm_vstrhq_scatter_offset_f16 (__ARM_mve_coerce(p0, float16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, float16x8_t)));})
/* Polymorphic predicated vstrhq_scatter_shifted_offset_p: same dispatch
   scheme as __arm_vstrhq_scatter_offset_p, selecting the shifted-offset
   variants; p3 is the predicate.  */
#define __arm_vstrhq_scatter_shifted_offset_p(p0,p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_shifted_offset_p_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_shifted_offset_p_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_shifted_offset_p_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_shifted_offset_p_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_float16x8_t]: __arm_vstrhq_scatter_shifted_offset_p_f16 (__ARM_mve_coerce(p0, float16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3));})
/* Polymorphic vstrhq_scatter_shifted_offset: same dispatch scheme as
   __arm_vstrhq_scatter_offset, selecting the shifted-offset variants.  */
#define __arm_vstrhq_scatter_shifted_offset(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_shifted_offset_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_shifted_offset_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_shifted_offset_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_shifted_offset_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)), \
  int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_float16x8_t]: __arm_vstrhq_scatter_shifted_offset_f16 (__ARM_mve_coerce(p0, float16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, float16x8_t)));})
/* Polymorphic predicated vstrwq_p (word store): dispatches on the
   (destination pointer, value vector) type pair; p2 is the predicate.  */
#define __arm_vstrwq_p(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrwq_p_s32 (__ARM_mve_coerce(p0, int32_t *), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrwq_p_u32 (__ARM_mve_coerce(p0, uint32_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_float32_t_ptr][__ARM_mve_type_float32x4_t]: __arm_vstrwq_p_f32 (__ARM_mve_coerce(p0, float32_t *), __ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Polymorphic vstrwq (word store): dispatches on the (destination pointer,
   value vector) type pair to the type-suffixed implementation.  */
#define __arm_vstrwq(p0,p1) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrwq_s32 (__ARM_mve_coerce(p0, int32_t *), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrwq_u32 (__ARM_mve_coerce(p0, uint32_t *), __ARM_mve_coerce(__p1, uint32x4_t)), \
  int (*)[__ARM_mve_type_float32_t_ptr][__ARM_mve_type_float32x4_t]: __arm_vstrwq_f32 (__ARM_mve_coerce(p0, float32_t *), __ARM_mve_coerce(__p1, float32x4_t)));})
/* Removed: this was a token-identical duplicate redefinition of
   __arm_vstrhq_scatter_offset; the macro is already defined earlier in
   this header.  An identical redefinition is legal C but is a maintenance
   hazard should the two copies ever diverge.  */
/* Removed: this was a token-identical duplicate redefinition of
   __arm_vstrhq_scatter_offset_p; the macro is already defined earlier in
   this header.  */
/* Polymorphic VSTRH scatter-store with shifted (element-scaled) offsets;
   dispatches on (base pointer, offset vector, value vector) types.  */
37786 #define __arm_vstrhq_scatter_shifted_offset(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
37787 __typeof(p2) __p2 = (p2); \
37788 _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
37789 int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_shifted_offset_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
37790 int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_shifted_offset_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
37791 int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_shifted_offset_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t)), \
37792 int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_shifted_offset_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)), \
37793 int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_float16x8_t]: __arm_vstrhq_scatter_shifted_offset_f16 (__ARM_mve_coerce(p0, float16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, float16x8_t)));})
/* Predicated form of the shifted-offset VSTRH scatter-store; p3 is the
   predicate.  */
37795 #define __arm_vstrhq_scatter_shifted_offset_p(p0,p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
37796 __typeof(p2) __p2 = (p2); \
37797 _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
37798 int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_shifted_offset_p_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
37799 int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_shifted_offset_p_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
37800 int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_shifted_offset_p_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
37801 int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_shifted_offset_p_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
37802 int (*)[__ARM_mve_type_float16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_float16x8_t]: __arm_vstrhq_scatter_shifted_offset_p_f16 (__ARM_mve_coerce(p0, float16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3));})
/* VSTRW scatter-store against a vector of base addresses (p0) plus an
   immediate offset (p1); dispatch is on the value vector's type only.  */
37804 #define __arm_vstrwq_scatter_base(p0,p1,p2) ({ __typeof(p2) __p2 = (p2); \
37805 _Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
37806 int (*)[__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_base_s32 (p0, p1, __ARM_mve_coerce(__p2, int32x4_t)), \
37807 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_base_u32 (p0, p1, __ARM_mve_coerce(__p2, uint32x4_t)), \
37808 int (*)[__ARM_mve_type_float32x4_t]: __arm_vstrwq_scatter_base_f32 (p0, p1, __ARM_mve_coerce(__p2, float32x4_t)));})
/* Predicated variant of the above; p3 is the predicate.  */
37810 #define __arm_vstrwq_scatter_base_p(p0,p1,p2,p3) ({ __typeof(p2) __p2 = (p2); \
37811 _Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
37812 int (*)[__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_base_p_s32(p0, p1, __ARM_mve_coerce(__p2, int32x4_t), p3), \
37813 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_base_p_u32(p0, p1, __ARM_mve_coerce(__p2, uint32x4_t), p3), \
37814 int (*)[__ARM_mve_type_float32x4_t]: __arm_vstrwq_scatter_base_p_f32(p0, p1, __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* VSTRW scatter-store with vector offsets; dispatch is on the (base pointer,
   value vector) type pair, p1 being the offset vector passed through.  */
37816 #define __arm_vstrwq_scatter_offset(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
37817 __typeof(p2) __p2 = (p2); \
37818 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p2)])0, \
37819 int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_offset_s32 (__ARM_mve_coerce(__p0, int32_t *), p1, __ARM_mve_coerce(__p2, int32x4_t)), \
37820 int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_offset_u32 (__ARM_mve_coerce(__p0, uint32_t *), p1, __ARM_mve_coerce(__p2, uint32x4_t)), \
37821 int (*)[__ARM_mve_type_float32_t_ptr][__ARM_mve_type_float32x4_t]: __arm_vstrwq_scatter_offset_f32 (__ARM_mve_coerce(__p0, float32_t *), p1, __ARM_mve_coerce(__p2, float32x4_t)));})
/* Predicated variant of the above; p3 is the predicate.  */
37823 #define __arm_vstrwq_scatter_offset_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
37824 __typeof(p2) __p2 = (p2); \
37825 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p2)])0, \
37826 int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_offset_p_s32 (__ARM_mve_coerce(__p0, int32_t *), p1, __ARM_mve_coerce(__p2, int32x4_t), p3), \
37827 int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_offset_p_u32 (__ARM_mve_coerce(__p0, uint32_t *), p1, __ARM_mve_coerce(__p2, uint32x4_t), p3), \
37828 int (*)[__ARM_mve_type_float32_t_ptr][__ARM_mve_type_float32x4_t]: __arm_vstrwq_scatter_offset_p_f32 (__ARM_mve_coerce(__p0, float32_t *), p1, __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* VSTRW scatter-store with shifted (element-scaled) offsets; dispatch is on
   the (base pointer, value vector) type pair.  */
37830 #define __arm_vstrwq_scatter_shifted_offset(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
37831 __typeof(p2) __p2 = (p2); \
37832 _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p2)])0, \
37833 int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_shifted_offset_s32 (__ARM_mve_coerce(p0, int32_t *), __p1, __ARM_mve_coerce(__p2, int32x4_t)), \
37834 int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_shifted_offset_u32 (__ARM_mve_coerce(p0, uint32_t *), __p1, __ARM_mve_coerce(__p2, uint32x4_t)), \
37835 int (*)[__ARM_mve_type_float32_t_ptr][__ARM_mve_type_float32x4_t]: __arm_vstrwq_scatter_shifted_offset_f32 (__ARM_mve_coerce(p0, float32_t *), __p1, __ARM_mve_coerce(__p2, float32x4_t)));})
/* Predicated variant of the above; p3 is the predicate.  */
37837 #define __arm_vstrwq_scatter_shifted_offset_p(p0,p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
37838 __typeof(p2) __p2 = (p2); \
37839 _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p2)])0, \
37840 int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_shifted_offset_p_s32 (__ARM_mve_coerce(p0, int32_t *), __p1, __ARM_mve_coerce(__p2, int32x4_t), p3), \
37841 int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_shifted_offset_p_u32 (__ARM_mve_coerce(p0, uint32_t *), __p1, __ARM_mve_coerce(__p2, uint32x4_t), p3), \
37842 int (*)[__ARM_mve_type_float32_t_ptr][__ARM_mve_type_float32x4_t]: __arm_vstrwq_scatter_shifted_offset_p_f32 (__ARM_mve_coerce(p0, float32_t *), __p1, __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Duplicate definitions of __arm_vstrwq_scatter_shifted_offset_p and
   __arm_vstrwq_scatter_shifted_offset removed: both macros are already
   defined identically immediately above.  An identical redefinition is
   legal C but redundant, and any future edit to only one copy would turn
   it into a hard redefinition error.  */
/* Return an uninitialized vector of the same MVE type as p0.  p0 is
   evaluated once into __p0; only its type is consulted by the dispatch —
   the value itself is never used.  */
37858 #define __arm_vuninitializedq(p0) ({ __typeof(p0) __p0 = (p0); \
37859 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
37860 int (*)[__ARM_mve_type_int8x16_t]: __arm_vuninitializedq_s8 (), \
37861 int (*)[__ARM_mve_type_int16x8_t]: __arm_vuninitializedq_s16 (), \
37862 int (*)[__ARM_mve_type_int32x4_t]: __arm_vuninitializedq_s32 (), \
37863 int (*)[__ARM_mve_type_int64x2_t]: __arm_vuninitializedq_s64 (), \
37864 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vuninitializedq_u8 (), \
37865 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vuninitializedq_u16 (), \
37866 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vuninitializedq_u32 (), \
37867 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vuninitializedq_u64 (), \
37868 int (*)[__ARM_mve_type_float16x8_t]: __arm_vuninitializedq_f16 (), \
37869 int (*)[__ARM_mve_type_float32x4_t]: __arm_vuninitializedq_f32 ());})
/* Polymorphic vreinterpretq family: each macro selects the
   __arm_vreinterpretq_<dst>_<src> variant from the argument's type.  Every
   source vector type except the destination type itself has a _Generic arm,
   so a same-type "reinterpret" is intentionally rejected at compile time.  */
/* Reinterpret any other MVE vector as float16x8_t.  */
37871 #define __arm_vreinterpretq_f16(p0) ({ __typeof(p0) __p0 = (p0); \
37872 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
37873 int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_f16_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
37874 int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_f16_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
37875 int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_f16_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
37876 int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_f16_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
37877 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_f16_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
37878 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_f16_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
37879 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_f16_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
37880 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_f16_u64 (__ARM_mve_coerce(__p0, uint64x2_t)), \
37881 int (*)[__ARM_mve_type_float32x4_t]: __arm_vreinterpretq_f16_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})
/* Reinterpret any other MVE vector as float32x4_t.  */
37883 #define __arm_vreinterpretq_f32(p0) ({ __typeof(p0) __p0 = (p0); \
37884 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
37885 int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_f32_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
37886 int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_f32_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
37887 int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_f32_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
37888 int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_f32_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
37889 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_f32_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
37890 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_f32_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
37891 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_f32_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
37892 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_f32_u64 (__ARM_mve_coerce(__p0, uint64x2_t)), \
37893 int (*)[__ARM_mve_type_float16x8_t]: __arm_vreinterpretq_f32_f16 (__ARM_mve_coerce(__p0, float16x8_t)));})
/* Reinterpret any other MVE vector as int16x8_t.  */
37895 #define __arm_vreinterpretq_s16(p0) ({ __typeof(p0) __p0 = (p0); \
37896 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
37897 int (*)[__ARM_mve_type_float16x8_t]: __arm_vreinterpretq_s16_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
37898 int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_s16_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
37899 int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_s16_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
37900 int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_s16_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
37901 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_s16_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
37902 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_s16_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
37903 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_s16_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
37904 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_s16_u64 (__ARM_mve_coerce(__p0, uint64x2_t)), \
37905 int (*)[__ARM_mve_type_float32x4_t]: __arm_vreinterpretq_s16_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})
/* Reinterpret any other MVE vector as int32x4_t.  */
37907 #define __arm_vreinterpretq_s32(p0) ({ __typeof(p0) __p0 = (p0); \
37908 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
37909 int (*)[__ARM_mve_type_float16x8_t]: __arm_vreinterpretq_s32_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
37910 int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_s32_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
37911 int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_s32_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
37912 int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_s32_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
37913 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_s32_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
37914 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_s32_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
37915 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_s32_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
37916 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_s32_u64 (__ARM_mve_coerce(__p0, uint64x2_t)), \
37917 int (*)[__ARM_mve_type_float32x4_t]: __arm_vreinterpretq_s32_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})
/* Reinterpret any other MVE vector as int64x2_t.  */
37919 #define __arm_vreinterpretq_s64(p0) ({ __typeof(p0) __p0 = (p0); \
37920 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
37921 int (*)[__ARM_mve_type_float16x8_t]: __arm_vreinterpretq_s64_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
37922 int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_s64_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
37923 int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_s64_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
37924 int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_s64_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
37925 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_s64_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
37926 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_s64_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
37927 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_s64_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
37928 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_s64_u64 (__ARM_mve_coerce(__p0, uint64x2_t)), \
37929 int (*)[__ARM_mve_type_float32x4_t]: __arm_vreinterpretq_s64_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})
/* Reinterpret any other MVE vector as int8x16_t.  */
37931 #define __arm_vreinterpretq_s8(p0) ({ __typeof(p0) __p0 = (p0); \
37932 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
37933 int (*)[__ARM_mve_type_float16x8_t]: __arm_vreinterpretq_s8_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
37934 int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_s8_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
37935 int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_s8_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
37936 int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_s8_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
37937 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_s8_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
37938 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_s8_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
37939 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_s8_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
37940 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_s8_u64 (__ARM_mve_coerce(__p0, uint64x2_t)), \
37941 int (*)[__ARM_mve_type_float32x4_t]: __arm_vreinterpretq_s8_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})
/* Reinterpret any other MVE vector as uint16x8_t.  */
37943 #define __arm_vreinterpretq_u16(p0) ({ __typeof(p0) __p0 = (p0); \
37944 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
37945 int (*)[__ARM_mve_type_float16x8_t]: __arm_vreinterpretq_u16_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
37946 int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_u16_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
37947 int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_u16_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
37948 int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_u16_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
37949 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_u16_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
37950 int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_u16_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
37951 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_u16_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
37952 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_u16_u64 (__ARM_mve_coerce(__p0, uint64x2_t)), \
37953 int (*)[__ARM_mve_type_float32x4_t]: __arm_vreinterpretq_u16_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})
/* Reinterpret any other MVE vector as uint32x4_t.  */
37955 #define __arm_vreinterpretq_u32(p0) ({ __typeof(p0) __p0 = (p0); \
37956 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
37957 int (*)[__ARM_mve_type_float16x8_t]: __arm_vreinterpretq_u32_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
37958 int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_u32_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
37959 int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_u32_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
37960 int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_u32_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
37961 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_u32_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
37962 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_u32_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
37963 int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_u32_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
37964 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_u32_u64 (__ARM_mve_coerce(__p0, uint64x2_t)), \
37965 int (*)[__ARM_mve_type_float32x4_t]: __arm_vreinterpretq_u32_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})
/* Reinterpret any other MVE vector as uint64x2_t.  */
37967 #define __arm_vreinterpretq_u64(p0) ({ __typeof(p0) __p0 = (p0); \
37968 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
37969 int (*)[__ARM_mve_type_float16x8_t]: __arm_vreinterpretq_u64_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
37970 int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_u64_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
37971 int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_u64_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
37972 int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_u64_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
37973 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_u64_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
37974 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_u64_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
37975 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_u64_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
37976 int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_u64_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
37977 int (*)[__ARM_mve_type_float32x4_t]: __arm_vreinterpretq_u64_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})
/* Reinterpret any other MVE vector as uint8x16_t.  */
37979 #define __arm_vreinterpretq_u8(p0) ({ __typeof(p0) __p0 = (p0); \
37980 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
37981 int (*)[__ARM_mve_type_float16x8_t]: __arm_vreinterpretq_u8_f16 (__ARM_mve_coerce(__p0, float16x8_t)), \
37982 int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_u8_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
37983 int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_u8_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
37984 int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_u8_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
37985 int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_u8_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
37986 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_u8_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
37987 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_u8_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
37988 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_u8_u64 (__ARM_mve_coerce(__p0, uint64x2_t)), \
37989 int (*)[__ARM_mve_type_float32x4_t]: __arm_vreinterpretq_u8_f32 (__ARM_mve_coerce(__p0, float32x4_t)));})
/* VSTRW scatter-store with write-back of the base address vector (p0 is
   passed through, presumably a pointer to the base vector so the variant can
   update it — TODO confirm against the _wb_s32 prototype).  Dispatch is on
   the value vector's type.  */
37991 #define __arm_vstrwq_scatter_base_wb(p0,p1,p2) ({ __typeof(p2) __p2 = (p2); \
37992 _Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
37993 int (*)[__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_base_wb_s32 (p0, p1, __ARM_mve_coerce(__p2, int32x4_t)), \
37994 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_base_wb_u32 (p0, p1, __ARM_mve_coerce(__p2, uint32x4_t)), \
37995 int (*)[__ARM_mve_type_float32x4_t]: __arm_vstrwq_scatter_base_wb_f32 (p0, p1, __ARM_mve_coerce(__p2, float32x4_t)));})
/* Predicated variant of the above; p3 is the predicate.  */
37997 #define __arm_vstrwq_scatter_base_wb_p(p0,p1,p2,p3) ({ __typeof(p2) __p2 = (p2); \
37998 _Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
37999 int (*)[__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_base_wb_p_s32 (p0, p1, __ARM_mve_coerce(__p2, int32x4_t), p3), \
38000 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_base_wb_p_u32 (p0, p1, __ARM_mve_coerce(__p2, uint32x4_t), p3), \
38001 int (*)[__ARM_mve_type_float32x4_t]: __arm_vstrwq_scatter_base_wb_p_f32 (p0, p1, __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic _x (predicated) vabdq: dispatches on both operand vector
   types to the s8/s16/s32/u8/u16/u32/f16/f32 variant; p3 is the predicate.  */
38003 #define __arm_vabdq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38004 __typeof(p2) __p2 = (p2); \
38005 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38006 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vabdq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
38007 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vabdq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
38008 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vabdq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
38009 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vabdq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
38010 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vabdq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
38011 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vabdq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
38012 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vabdq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38013 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vabdq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic _x vabsq (unary): signed and float element types only; p2 is
   the predicate.  */
38015 #define __arm_vabsq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
38016 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38017 int (*)[__ARM_mve_type_int8x16_t]: __arm_vabsq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
38018 int (*)[__ARM_mve_type_int16x8_t]: __arm_vabsq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
38019 int (*)[__ARM_mve_type_int32x4_t]: __arm_vabsq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2), \
38020 int (*)[__ARM_mve_type_float16x8_t]: __arm_vabsq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), p2), \
38021 int (*)[__ARM_mve_type_float32x4_t]: __arm_vabsq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Polymorphic _x vaddq: handles vector+vector and vector+scalar forms.
   Scalar p2 is matched via __ARM_mve_type_int_n / __ARM_mve_type_fp_n and
   coerced with __ARM_mve_coerce3/__ARM_mve_coerce2 (note: the raw p2, not
   __p2, is used in the scalar arms); p3 is the predicate.  */
38023 #define __arm_vaddq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38024 __typeof(p2) __p2 = (p2); \
38025 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38026 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vaddq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
38027 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vaddq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
38028 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vaddq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
38029 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vaddq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
38030 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vaddq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
38031 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vaddq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
38032 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vaddq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
38033 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vaddq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
38034 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vaddq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
38035 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vaddq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
38036 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vaddq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
38037 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vaddq_x_n_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
38038 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vaddq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38039 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vaddq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3), \
38040 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vaddq_x_n_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce2(p2, double), p3), \
38041 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vaddq_x_n_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce2(p2, double), p3));})
/* Polymorphic _x vandq (bitwise AND); dispatches on both operand types,
   p3 is the predicate.  */
38043 #define __arm_vandq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38044 __typeof(p2) __p2 = (p2); \
38045 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38046 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vandq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
38047 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vandq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
38048 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vandq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
38049 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vandq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
38050 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vandq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
38051 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vandq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
38052 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vandq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38053 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vandq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic _x vbicq (bit-clear, AND NOT); same dispatch shape as vandq_x.  */
38055 #define __arm_vbicq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38056 __typeof(p2) __p2 = (p2); \
38057 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38058 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vbicq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
38059 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vbicq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
38060 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vbicq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
38061 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vbicq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
38062 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vbicq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
38063 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vbicq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
38064 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vbicq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38065 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vbicq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic _x vbrsrq (bit-reverse-and-shift-right with scalar operand):
   dispatches on the vector type only; p2 (the scalar) and p3 (the
   predicate) are passed straight through to the _n variant.  */
38067 #define __arm_vbrsrq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38068 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38069 int (*)[__ARM_mve_type_int8x16_t]: __arm_vbrsrq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
38070 int (*)[__ARM_mve_type_int16x8_t]: __arm_vbrsrq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
38071 int (*)[__ARM_mve_type_int32x4_t]: __arm_vbrsrq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
38072 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vbrsrq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
38073 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vbrsrq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
38074 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vbrsrq_x_n_u32 (__ARM_mve_coerce(__p1, uint32x4_t), p2, p3), \
38075 int (*)[__ARM_mve_type_float16x8_t]: __arm_vbrsrq_x_n_f16 (__ARM_mve_coerce(__p1, float16x8_t), p2, p3), \
38076 int (*)[__ARM_mve_type_float32x4_t]: __arm_vbrsrq_x_n_f32 (__ARM_mve_coerce(__p1, float32x4_t), p2, p3));})
38078 #define __arm_vcaddq_rot270_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38079 __typeof(p2) __p2 = (p2); \
38080 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38081 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcaddq_rot270_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
38082 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcaddq_rot270_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
38083 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcaddq_rot270_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
38084 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcaddq_rot270_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
38085 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcaddq_rot270_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
38086 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcaddq_rot270_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
38087 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcaddq_rot270_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38088 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcaddq_rot270_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic vcaddq_rot90_x: same dispatch scheme as vcaddq_rot270_x,
   selecting the type-suffixed implementation from the two operand types.  */
38090 #define __arm_vcaddq_rot90_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38091 __typeof(p2) __p2 = (p2); \
38092 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38093 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcaddq_rot90_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
38094 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcaddq_rot90_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
38095 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcaddq_rot90_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
38096 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcaddq_rot90_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
38097 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcaddq_rot90_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
38098 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcaddq_rot90_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
38099 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcaddq_rot90_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38100 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcaddq_rot90_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic vcmulq_rot180_x: float-only overload set (f16/f32), chosen by
   the operands' MVE type ids; p3 is forwarded unchanged.  */
38102 #define __arm_vcmulq_rot180_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38103 __typeof(p2) __p2 = (p2); \
38104 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38105 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmulq_rot180_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38106 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmulq_rot180_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic vcmulq_rot270_x: float-only overload set (f16/f32).  */
38108 #define __arm_vcmulq_rot270_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38109 __typeof(p2) __p2 = (p2); \
38110 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38111 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmulq_rot270_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38112 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmulq_rot270_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic vcmulq_x: float-only overload set (f16/f32).  */
38114 #define __arm_vcmulq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38115 __typeof(p2) __p2 = (p2); \
38116 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38117 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmulq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38118 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmulq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic vcvtq_x: integer-to-float conversion; the source integer
   vector type selects which _f16_*/_f32_* conversion is called.  */
38120 #define __arm_vcvtq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
38121 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38122 int (*)[__ARM_mve_type_int16x8_t]: __arm_vcvtq_x_f16_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
38123 int (*)[__ARM_mve_type_int32x4_t]: __arm_vcvtq_x_f32_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2), \
38124 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vcvtq_x_f16_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2), \
38125 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vcvtq_x_f32_u32 (__ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic vcvtq_x_n: like vcvtq_x but with an extra argument p2
   forwarded to the _n_ variants (fixed-point fractional-bits form).  */
38127 #define __arm_vcvtq_x_n(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38128 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38129 int (*)[__ARM_mve_type_int16x8_t]: __arm_vcvtq_x_n_f16_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
38130 int (*)[__ARM_mve_type_int32x4_t]: __arm_vcvtq_x_n_f32_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
38131 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vcvtq_x_n_f16_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
38132 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vcvtq_x_n_f32_u32 (__ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic veorq_x: dispatches on both operand vector types (s8..f32)
   to the type-suffixed implementation; p3 is forwarded unchanged.  */
38134 #define __arm_veorq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38135 __typeof(p2) __p2 = (p2); \
38136 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38137 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_veorq_x_s8(__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
38138 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_veorq_x_s16(__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
38139 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_veorq_x_s32(__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
38140 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_veorq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
38141 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_veorq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
38142 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_veorq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
38143 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_veorq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38144 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_veorq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic vmaxnmq_x: float-only overload set (f16/f32).  */
38146 #define __arm_vmaxnmq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38147 __typeof(p2) __p2 = (p2); \
38148 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38149 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vmaxnmq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38150 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vmaxnmq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic vminnmq_x: float-only overload set (f16/f32).  */
38152 #define __arm_vminnmq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38153 __typeof(p2) __p2 = (p2); \
38154 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38155 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vminnmq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38156 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vminnmq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic vmulq_x: covers vector*vector forms and vector*scalar _n_
   forms — a scalar second operand (type id __ARM_mve_type_int_n /
   __ARM_mve_type_fp_n) selects the _n_ variant; p3 is forwarded
   unchanged.  */
38158 #define __arm_vmulq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38159 __typeof(p2) __p2 = (p2); \
38160 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38161 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
38162 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
38163 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
38164 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vmulq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
38165 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmulq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
38166 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmulq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
38167 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
38168 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
38169 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
38170 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vmulq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
38171 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmulq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
38172 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmulq_x_n_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
38173 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vmulq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38174 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vmulq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3), \
38175 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vmulq_x_n_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce2(p2, double), p3), \
38176 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vmulq_x_n_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce2(p2, double), p3));})
/* Polymorphic vnegq_x: unary overload — signed integer and float vector
   types only; p2 is forwarded unchanged.  */
38178 #define __arm_vnegq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
38179 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38180 int (*)[__ARM_mve_type_int8x16_t]: __arm_vnegq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
38181 int (*)[__ARM_mve_type_int16x8_t]: __arm_vnegq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
38182 int (*)[__ARM_mve_type_int32x4_t]: __arm_vnegq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2), \
38183 int (*)[__ARM_mve_type_float16x8_t]: __arm_vnegq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), p2), \
38184 int (*)[__ARM_mve_type_float32x4_t]: __arm_vnegq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Polymorphic vornq_x: dispatches on both operand vector types
   (s8..f32) to the type-suffixed implementation.  */
38186 #define __arm_vornq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38187 __typeof(p2) __p2 = (p2); \
38188 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38189 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vornq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
38190 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vornq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
38191 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vornq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
38192 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vornq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
38193 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vornq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
38194 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vornq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
38195 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vornq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38196 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vornq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic vorrq_x: dispatches on both operand vector types
   (s8..f32) to the type-suffixed implementation.  */
38198 #define __arm_vorrq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38199 __typeof(p2) __p2 = (p2); \
38200 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38201 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vorrq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
38202 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vorrq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
38203 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vorrq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
38204 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vorrq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
38205 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vorrq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
38206 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vorrq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
38207 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vorrq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38208 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vorrq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic vrev32q_x: only 8/16-bit element types (plus f16) qualify —
   32-bit element vectors have no _rev32 form.  */
38210 #define __arm_vrev32q_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
38211 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38212 int (*)[__ARM_mve_type_int8x16_t]: __arm_vrev32q_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
38213 int (*)[__ARM_mve_type_int16x8_t]: __arm_vrev32q_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
38214 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrev32q_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2), \
38215 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrev32q_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2), \
38216 int (*)[__ARM_mve_type_float16x8_t]: __arm_vrev32q_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), p2));})
/* Polymorphic vrev64q_x: all 8/16/32-bit vector element types qualify.  */
38218 #define __arm_vrev64q_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
38219 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38220 int (*)[__ARM_mve_type_int8x16_t]: __arm_vrev64q_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
38221 int (*)[__ARM_mve_type_int16x8_t]: __arm_vrev64q_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
38222 int (*)[__ARM_mve_type_int32x4_t]: __arm_vrev64q_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2), \
38223 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrev64q_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2), \
38224 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrev64q_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2), \
38225 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vrev64q_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), p2), \
38226 int (*)[__ARM_mve_type_float16x8_t]: __arm_vrev64q_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), p2), \
38227 int (*)[__ARM_mve_type_float32x4_t]: __arm_vrev64q_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Polymorphic vrndaq_x: float-only unary overload (f16/f32).  */
38229 #define __arm_vrndaq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
38230 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38231 int (*)[__ARM_mve_type_float16x8_t]: __arm_vrndaq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), p2), \
38232 int (*)[__ARM_mve_type_float32x4_t]: __arm_vrndaq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Polymorphic vrndmq_x: float-only unary overload (f16/f32).  */
38234 #define __arm_vrndmq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
38235 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38236 int (*)[__ARM_mve_type_float16x8_t]: __arm_vrndmq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), p2), \
38237 int (*)[__ARM_mve_type_float32x4_t]: __arm_vrndmq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Polymorphic vrndnq_x: float-only unary overload (f16/f32).  */
38239 #define __arm_vrndnq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
38240 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38241 int (*)[__ARM_mve_type_float16x8_t]: __arm_vrndnq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), p2), \
38242 int (*)[__ARM_mve_type_float32x4_t]: __arm_vrndnq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Polymorphic vrndpq_x: float-only unary overload (f16/f32).  */
38244 #define __arm_vrndpq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
38245 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38246 int (*)[__ARM_mve_type_float16x8_t]: __arm_vrndpq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), p2), \
38247 int (*)[__ARM_mve_type_float32x4_t]: __arm_vrndpq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Polymorphic vrndq_x: float-only unary overload (f16/f32).  */
38249 #define __arm_vrndq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
38250 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38251 int (*)[__ARM_mve_type_float16x8_t]: __arm_vrndq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), p2), \
38252 int (*)[__ARM_mve_type_float32x4_t]: __arm_vrndq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Polymorphic vrndxq_x: float-only unary overload (f16/f32).  */
38254 #define __arm_vrndxq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
38255 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
38256 int (*)[__ARM_mve_type_float16x8_t]: __arm_vrndxq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), p2), \
38257 int (*)[__ARM_mve_type_float32x4_t]: __arm_vrndxq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), p2));})
/* Polymorphic vsubq_x: mirrors vmulq_x's dispatch — vector-vector forms
   plus vector-scalar _n_ forms selected when the second operand's type id
   is __ARM_mve_type_int_n / __ARM_mve_type_fp_n.  */
38259 #define __arm_vsubq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38260 __typeof(p2) __p2 = (p2); \
38261 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38262 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vsubq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
38263 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vsubq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
38264 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsubq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
38265 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vsubq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
38266 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vsubq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
38267 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vsubq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
38268 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vsubq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
38269 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vsubq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
38270 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsubq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
38271 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vsubq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
38272 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vsubq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
38273 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vsubq_x_n_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
38274 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vsubq_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38275 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vsubq_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3), \
38276 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_fp_n]: __arm_vsubq_x_n_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce2(p2, double), p3), \
38277 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_fp_n]: __arm_vsubq_x_n_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce2(p2, double), p3));})
/* Polymorphic vcmulq_rot90_x: float-only overload set (f16/f32).  */
38279 #define __arm_vcmulq_rot90_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
38280 __typeof(p2) __p2 = (p2); \
38281 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
38282 int (*)[__ARM_mve_type_float16x8_t][__ARM_mve_type_float16x8_t]: __arm_vcmulq_rot90_x_f16 (__ARM_mve_coerce(__p1, float16x8_t), __ARM_mve_coerce(__p2, float16x8_t), p3), \
38283 int (*)[__ARM_mve_type_float32x4_t][__ARM_mve_type_float32x4_t]: __arm_vcmulq_rot90_x_f32 (__ARM_mve_coerce(__p1, float32x4_t), __ARM_mve_coerce(__p2, float32x4_t), p3));})
/* Polymorphic vgetq_lane: extract lane p1 from vector p0; dispatches on the
   vector's type (including 64-bit element vectors).  */
38285 #define __arm_vgetq_lane(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38286 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38287 int (*)[__ARM_mve_type_int8x16_t]: __arm_vgetq_lane_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
38288 int (*)[__ARM_mve_type_int16x8_t]: __arm_vgetq_lane_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
38289 int (*)[__ARM_mve_type_int32x4_t]: __arm_vgetq_lane_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
38290 int (*)[__ARM_mve_type_int64x2_t]: __arm_vgetq_lane_s64 (__ARM_mve_coerce(__p0, int64x2_t), p1), \
38291 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vgetq_lane_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
38292 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vgetq_lane_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
38293 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vgetq_lane_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1), \
38294 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vgetq_lane_u64 (__ARM_mve_coerce(__p0, uint64x2_t), p1), \
38295 int (*)[__ARM_mve_type_float16x8_t]: __arm_vgetq_lane_f16 (__ARM_mve_coerce(__p0, float16x8_t), p1), \
38296 int (*)[__ARM_mve_type_float32x4_t]: __arm_vgetq_lane_f32 (__ARM_mve_coerce(__p0, float32x4_t), p1));})
/* Polymorphic vsetq_lane: insert scalar p0 into lane p2 of vector p1.
   Dispatch keys on (scalar kind, vector type): integer scalars go through
   __ARM_mve_coerce3, floating scalars through __ARM_mve_coerce2.  */
38298 #define __arm_vsetq_lane(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
38299 __typeof(p1) __p1 = (p1); \
38300 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38301 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t]: __arm_vsetq_lane_s8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int8x16_t), p2), \
38302 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t]: __arm_vsetq_lane_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t), p2), \
38303 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vsetq_lane_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), p2), \
38304 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int64x2_t]: __arm_vsetq_lane_s64 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int64x2_t), p2), \
38305 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint8x16_t]: __arm_vsetq_lane_u8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
38306 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t]: __arm_vsetq_lane_u16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
38307 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t]: __arm_vsetq_lane_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
38308 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint64x2_t]: __arm_vsetq_lane_u64 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint64x2_t), p2), \
38309 int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float16x8_t]: __arm_vsetq_lane_f16 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float16x8_t), p2), \
38310 int (*)[__ARM_mve_type_fp_n][__ARM_mve_type_float32x4_t]: __arm_vsetq_lane_f32 (__ARM_mve_coerce2(p0, double), __ARM_mve_coerce(__p1, float32x4_t), p2));})
38312 #else /* MVE Integer. */
/* Polymorphic vstrwq_scatter_base_wb (MVE-integer branch): dispatch on the
   type of the value vector p2; p0 and p1 are passed through unchanged.  */
38314 #define __arm_vstrwq_scatter_base_wb(p0,p1,p2) ({ __typeof(p2) __p2 = (p2); \
38315 _Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
38316 int (*)[__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_base_wb_s32 (p0, p1, __ARM_mve_coerce(__p2, int32x4_t)), \
38317 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_base_wb_u32 (p0, p1, __ARM_mve_coerce(__p2, uint32x4_t)));})
/* Predicated form of vstrwq_scatter_base_wb: same dispatch, with the extra
   argument p3 forwarded to the _p_ variants.  */
38319 #define __arm_vstrwq_scatter_base_wb_p(p0,p1,p2,p3) ({ __typeof(p2) __p2 = (p2); \
38320 _Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
38321 int (*)[__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_base_wb_p_s32 (p0, p1, __ARM_mve_coerce(__p2, int32x4_t), p3), \
38322 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_base_wb_p_u32 (p0, p1, __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic vst4q (MVE-integer branch): dispatch keyed on both the
   destination pointer type (p0) and the 4-vector structure type (p1).  */
38324 #define __arm_vst4q(p0,p1) ({ __typeof(p1) __p1 = (p1); \
38325 _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
38326 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int8x16x4_t]: __arm_vst4q_s8 (__ARM_mve_coerce(p0, int8_t *), __ARM_mve_coerce(__p1, int8x16x4_t)), \
38327 int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int16x8x4_t]: __arm_vst4q_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int16x8x4_t)), \
38328 int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4x4_t]: __arm_vst4q_s32 (__ARM_mve_coerce(p0, int32_t *), __ARM_mve_coerce(__p1, int32x4x4_t)), \
38329 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16x4_t]: __arm_vst4q_u8 (__ARM_mve_coerce(p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16x4_t)), \
38330 int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8x4_t]: __arm_vst4q_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8x4_t)), \
38331 int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4x4_t]: __arm_vst4q_u32 (__ARM_mve_coerce(p0, uint32_t *), __ARM_mve_coerce(__p1, uint32x4x4_t)));})
/* Polymorphic vabsq (MVE-integer branch): signed integer vectors only.  */
38333 #define __arm_vabsq(p0) ({ __typeof(p0) __p0 = (p0); \
38334 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38335 int (*)[__ARM_mve_type_int8x16_t]: __arm_vabsq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
38336 int (*)[__ARM_mve_type_int16x8_t]: __arm_vabsq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
38337 int (*)[__ARM_mve_type_int32x4_t]: __arm_vabsq_s32 (__ARM_mve_coerce(__p0, int32x4_t)));})
/* Polymorphic vclsq: signed integer vectors only.  */
38339 #define __arm_vclsq(p0) ({ __typeof(p0) __p0 = (p0); \
38340 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38341 int (*)[__ARM_mve_type_int8x16_t]: __arm_vclsq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
38342 int (*)[__ARM_mve_type_int16x8_t]: __arm_vclsq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
38343 int (*)[__ARM_mve_type_int32x4_t]: __arm_vclsq_s32 (__ARM_mve_coerce(__p0, int32x4_t)));})
/* Polymorphic vclzq: signed and unsigned integer vector types.  */
38345 #define __arm_vclzq(p0) ({ __typeof(p0) __p0 = (p0); \
38346 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38347 int (*)[__ARM_mve_type_int8x16_t]: __arm_vclzq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
38348 int (*)[__ARM_mve_type_int16x8_t]: __arm_vclzq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
38349 int (*)[__ARM_mve_type_int32x4_t]: __arm_vclzq_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
38350 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vclzq_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
38351 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vclzq_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
38352 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vclzq_u32 (__ARM_mve_coerce(__p0, uint32x4_t)));})
/* Polymorphic vnegq (MVE-integer branch): signed integer vectors only.  */
38354 #define __arm_vnegq(p0) ({ __typeof(p0) __p0 = (p0); \
38355 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38356 int (*)[__ARM_mve_type_int8x16_t]: __arm_vnegq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
38357 int (*)[__ARM_mve_type_int16x8_t]: __arm_vnegq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
38358 int (*)[__ARM_mve_type_int32x4_t]: __arm_vnegq_s32 (__ARM_mve_coerce(__p0, int32x4_t)));})
/* Polymorphic vmovlbq: widening, so only 8/16-bit element vectors
   qualify as input.  */
38360 #define __arm_vmovlbq(p0) ({ __typeof(p0) __p0 = (p0); \
38361 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38362 int (*)[__ARM_mve_type_int8x16_t]: __arm_vmovlbq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
38363 int (*)[__ARM_mve_type_int16x8_t]: __arm_vmovlbq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
38364 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vmovlbq_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
38365 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vmovlbq_u16 (__ARM_mve_coerce(__p0, uint16x8_t)));})
/* Polymorphic vmovltq: widening, so only 8/16-bit element vectors
   qualify as input.  */
38367 #define __arm_vmovltq(p0) ({ __typeof(p0) __p0 = (p0); \
38368 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38369 int (*)[__ARM_mve_type_int8x16_t]: __arm_vmovltq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
38370 int (*)[__ARM_mve_type_int16x8_t]: __arm_vmovltq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
38371 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vmovltq_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
38372 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vmovltq_u16 (__ARM_mve_coerce(__p0, uint16x8_t)));})
/* Polymorphic vmvnq: signed and unsigned integer vector types.  */
38374 #define __arm_vmvnq(p0) ({ __typeof(p0) __p0 = (p0); \
38375 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38376 int (*)[__ARM_mve_type_int8x16_t]: __arm_vmvnq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
38377 int (*)[__ARM_mve_type_int16x8_t]: __arm_vmvnq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
38378 int (*)[__ARM_mve_type_int32x4_t]: __arm_vmvnq_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
38379 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vmvnq_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
38380 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vmvnq_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
38381 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vmvnq_u32 (__ARM_mve_coerce(__p0, uint32x4_t)));})
/* Polymorphic vrev16q: only 8-bit element vectors qualify.  */
38383 #define __arm_vrev16q(p0) ({ __typeof(p0) __p0 = (p0); \
38384 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38385 int (*)[__ARM_mve_type_int8x16_t]: __arm_vrev16q_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
38386 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrev16q_u8 (__ARM_mve_coerce(__p0, uint8x16_t)));})
/* Polymorphic vrev32q (MVE-integer branch): 8/16-bit element vectors.  */
38388 #define __arm_vrev32q(p0) ({ __typeof(p0) __p0 = (p0); \
38389 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38390 int (*)[__ARM_mve_type_int8x16_t]: __arm_vrev32q_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
38391 int (*)[__ARM_mve_type_int16x8_t]: __arm_vrev32q_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
38392 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrev32q_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
38393 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrev32q_u16 (__ARM_mve_coerce(__p0, uint16x8_t)));})
/* Polymorphic vrev64q (MVE-integer branch): all integer vector types.  */
38395 #define __arm_vrev64q(p0) ({ __typeof(p0) __p0 = (p0); \
38396 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38397 int (*)[__ARM_mve_type_int8x16_t]: __arm_vrev64q_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
38398 int (*)[__ARM_mve_type_int16x8_t]: __arm_vrev64q_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
38399 int (*)[__ARM_mve_type_int32x4_t]: __arm_vrev64q_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
38400 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrev64q_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
38401 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrev64q_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
38402 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vrev64q_u32 (__ARM_mve_coerce(__p0, uint32x4_t)));})
/* Polymorphic vqabsq: signed integer vectors only.  */
38404 #define __arm_vqabsq(p0) ({ __typeof(p0) __p0 = (p0); \
38405 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38406 int (*)[__ARM_mve_type_int8x16_t]: __arm_vqabsq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
38407 int (*)[__ARM_mve_type_int16x8_t]: __arm_vqabsq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
38408 int (*)[__ARM_mve_type_int32x4_t]: __arm_vqabsq_s32 (__ARM_mve_coerce(__p0, int32x4_t)));})
/* Polymorphic vqnegq: signed integer vectors only.  */
38410 #define __arm_vqnegq(p0) ({ __typeof(p0) __p0 = (p0); \
38411 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38412 int (*)[__ARM_mve_type_int8x16_t]: __arm_vqnegq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
38413 int (*)[__ARM_mve_type_int16x8_t]: __arm_vqnegq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
38414 int (*)[__ARM_mve_type_int32x4_t]: __arm_vqnegq_s32 (__ARM_mve_coerce(__p0, int32x4_t)));})
/* Polymorphic vshrq: maps to the _n_ (shift-by-immediate) variants; p1 is
   the immediate shift count, forwarded unchanged.  */
38416 #define __arm_vshrq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38417 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38418 int (*)[__ARM_mve_type_int8x16_t]: __arm_vshrq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
38419 int (*)[__ARM_mve_type_int16x8_t]: __arm_vshrq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
38420 int (*)[__ARM_mve_type_int32x4_t]: __arm_vshrq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
38421 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshrq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
38422 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshrq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
38423 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshrq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* Polymorphic vcmpneq (MVE-integer branch): a scalar second operand
   (__ARM_mve_type_int_n) selects the _n_ vector-vs-scalar compare;
   otherwise the vector-vs-vector form matching both operand types.  */
38425 #define __arm_vcmpneq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38426 __typeof(p1) __p1 = (p1); \
38427 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38428 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpneq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
38429 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpneq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
38430 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpneq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
38431 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmpneq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
38432 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmpneq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
38433 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmpneq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
38434 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpneq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38435 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpneq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38436 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpneq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38437 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmpneq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38438 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmpneq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38439 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmpneq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Polymorphic vshlq (vector form): the shift-count operand is always a
   signed vector of matching element width, for both signed and unsigned
   first operands — hence the u8/u16/u32 rows pair with int8x16_t /
   int16x8_t / int32x4_t second operands.  */
#define __arm_vshlq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vshlq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vshlq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vshlq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vshlq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vshlq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vshlq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Polymorphic vsubq: vector/vector pairs select the plain subtract;
   a scalar second operand (type id int_n, coerced from the unexpanded
   p1 via __ARM_mve_coerce3) selects the _n broadcast-subtract forms.  */
#define __arm_vsubq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vsubq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vsubq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsubq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vsubq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vsubq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsubq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vsubq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vsubq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vsubq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vsubq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vsubq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vsubq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)));})
/* Polymorphic vshlq_r: dispatch on the vector operand only; the scalar
   register shift count p1 is forwarded unexpanded and uncoerced.  */
#define __arm_vshlq_r(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlq_r_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlq_r_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vshlq_r_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlq_r_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlq_r_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshlq_r_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* Polymorphic vrshlq: a scalar second operand (int_n) selects the _n
   forms; a signed shift-count vector of matching width selects the
   vector forms (unsigned first operands still take a signed count
   vector, as in vshlq).  */
#define __arm_vrshlq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vrshlq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vrshlq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vrshlq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vrshlq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vrshlq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vrshlq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrshlq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrshlq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrshlq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrshlq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrshlq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrshlq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Polymorphic vrmulhq: like-typed vector/vector dispatch over all six
   integer element types.  */
#define __arm_vrmulhq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrmulhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrmulhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrmulhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrmulhq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrmulhq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrmulhq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Polymorphic vrhaddq: like-typed vector/vector dispatch over all six
   integer element types.  */
#define __arm_vrhaddq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrhaddq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrhaddq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrhaddq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrhaddq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrhaddq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrhaddq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Polymorphic vqsubq: scalar second operand (int_n) selects the _n
   broadcast forms; matching vector pairs select the vector forms.  */
#define __arm_vqsubq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqsubq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqsubq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqsubq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vqsubq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vqsubq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vqsubq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqsubq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqsubq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqsubq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vqsubq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vqsubq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vqsubq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Polymorphic vqshlq (vector form): the shift-count operand is a signed
   vector of matching element width for both signed and unsigned data.  */
#define __arm_vqshlq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqshlq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqshlq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqshlq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqshlq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqshlq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqshlq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Polymorphic vqshlq_r: dispatch on the vector operand only; the scalar
   register shift count p1 is forwarded unexpanded and uncoerced.  */
#define __arm_vqshlq_r(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vqshlq_r_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vqshlq_r_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vqshlq_r_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vqshlq_r_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vqshlq_r_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vqshlq_r_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* Polymorphic vqshluq: signed-input-only dispatch (s8/s16/s32) to the
   _n forms; p1 is the immediate shift count, passed unexpanded.  */
#define __arm_vqshluq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vqshluq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vqshluq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vqshluq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1));})
/* Polymorphic vrshrq: all six integer element types map to the _n
   forms; p1 is the immediate shift count, passed unexpanded.  */
#define __arm_vrshrq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vrshrq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vrshrq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vrshrq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrshrq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrshrq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vrshrq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* Polymorphic vshlq_n: immediate left shift; dispatch on the vector
   operand, forwarding the unexpanded immediate p1.  */
#define __arm_vshlq_n(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vshlq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshlq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* Polymorphic vqshlq_n: immediate form; dispatch on the vector operand,
   forwarding the unexpanded immediate p1.  */
#define __arm_vqshlq_n(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vqshlq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vqshlq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vqshlq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vqshlq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vqshlq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vqshlq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* Polymorphic vqrshlq: signed shift-count vectors of matching width
   select the vector forms (for both signed and unsigned data); a scalar
   second operand (int_n) selects the _n forms.  */
#define __arm_vqrshlq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrshlq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrshlq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrshlq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrshlq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrshlq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrshlq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqrshlq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqrshlq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqrshlq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vqrshlq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vqrshlq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vqrshlq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)));})
/* Polymorphic vqrdmulhq: signed-only.  Matching vector pairs select the
   vector forms; a scalar second operand (int_n) selects the _n forms.  */
#define __arm_vqrdmulhq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmulhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmulhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmulhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqrdmulhq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqrdmulhq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqrdmulhq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)));})
/* Polymorphic vqdmulhq: signed-only.  A scalar second operand (int_n)
   selects the _n forms; matching vector pairs select the vector forms.  */
#define __arm_vqdmulhq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqdmulhq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmulhq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmulhq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmulhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmulhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmulhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* Polymorphic vqaddq: scalar second operand (int_n) selects the _n
   broadcast forms; matching vector pairs select the vector forms.  */
#define __arm_vqaddq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqaddq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqaddq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqaddq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vqaddq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vqaddq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vqaddq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqaddq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqaddq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqaddq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vqaddq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vqaddq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vqaddq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Polymorphic vorrq: like-typed vector/vector dispatch over all six
   integer element types.  */
#define __arm_vorrq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vorrq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vorrq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vorrq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vorrq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vorrq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vorrq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Polymorphic vornq: like-typed vector/vector dispatch over all six
   integer element types.  */
#define __arm_vornq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vornq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vornq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vornq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vornq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vornq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vornq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Polymorphic vmulq: scalar second operand (int_n) selects the _n
   broadcast forms; matching vector pairs select the vector forms.  */
#define __arm_vmulq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vmulq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmulq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmulq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vmulq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmulq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmulq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Polymorphic vmulltq_int: like-typed vector/vector dispatch over all
   six integer element types (the _int suffix distinguishes these from
   the polynomial vmulltq_poly overloads defined elsewhere).  */
#define __arm_vmulltq_int(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulltq_int_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulltq_int_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulltq_int_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulltq_int_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulltq_int_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulltq_int_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Polymorphic vmullbq_int: like-typed vector/vector dispatch over all
   six integer element types.  */
#define __arm_vmullbq_int(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmullbq_int_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmullbq_int_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmullbq_int_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmullbq_int_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmullbq_int_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmullbq_int_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Polymorphic vmulhq: like-typed vector/vector dispatch over all six
   integer element types.  */
#define __arm_vmulhq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulhq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulhq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulhq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Polymorphic vminq: like-typed vector/vector dispatch over all six
   integer element types.  */
#define __arm_vminq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vminq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vminq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vminq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vminq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vminq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vminq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Polymorphic vminaq: first operand is unsigned, second is signed of
   the same element width; only the three signed-suffix intrinsics
   exist.  */
#define __arm_vminaq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vminaq_s8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vminaq_s16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vminaq_s32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vmaxq: polymorphic element-wise maximum; _Generic selects the
   s8/s16/s32/u8/u16/u32 vector-vector variant from the operand types.  */
38715 #define __arm_vmaxq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38716 __typeof(p1) __p1 = (p1); \
38717 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38718 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmaxq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38719 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmaxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38720 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmaxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38721 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmaxq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38722 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmaxq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38723 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmaxq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vmaxaq: polymorphic max-absolute accumulate; unsigned accumulator in
   p0, signed input in p1 — mirrors vminaq above.  */
38725 #define __arm_vmaxaq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38726 __typeof(p1) __p1 = (p1); \
38727 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38728 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmaxaq_s8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38729 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmaxaq_s16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38730 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmaxaq_s32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vhsubq: polymorphic halving subtract.  Supports vector-scalar (the
   __ARM_mve_type_int_n rows, dispatching to the _n_ variants) and
   vector-vector forms.  NOTE: the scalar rows deliberately pass the
   UNEXPANDED argument p1 to __ARM_mve_coerce3, not the __p1 copy, so
   that integer constant expressions stay constant — do not "fix".  */
38732 #define __arm_vhsubq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38733 __typeof(p1) __p1 = (p1); \
38734 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38735 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vhsubq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
38736 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vhsubq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
38737 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vhsubq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
38738 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vhsubq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
38739 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vhsubq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
38740 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vhsubq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
38741 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhsubq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38742 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhsubq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38743 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhsubq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38744 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vhsubq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38745 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vhsubq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38746 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vhsubq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vhcaddq_rot90: polymorphic halving complex add with 90-degree
   rotation; signed-only (s8/s16/s32) vector-vector dispatch.  */
38748 #define __arm_vhcaddq_rot90(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38749 __typeof(p1) __p1 = (p1); \
38750 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38751 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhcaddq_rot90_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38752 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhcaddq_rot90_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38753 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhcaddq_rot90_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vhcaddq_rot270: as vhcaddq_rot90 above but with 270-degree rotation;
   signed-only (s8/s16/s32) vector-vector dispatch.  */
38755 #define __arm_vhcaddq_rot270(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38756 __typeof(p1) __p1 = (p1); \
38757 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38758 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhcaddq_rot270_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38759 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhcaddq_rot270_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38760 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhcaddq_rot270_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vhaddq: polymorphic halving add; vector-scalar (_n_ rows, unexpanded
   p1 passed to __ARM_mve_coerce3 to keep constant expressions constant)
   plus vector-vector forms for s/u 8/16/32.  */
38762 #define __arm_vhaddq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38763 __typeof(p1) __p1 = (p1); \
38764 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38765 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vhaddq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
38766 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vhaddq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
38767 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vhaddq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
38768 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vhaddq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
38769 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vhaddq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
38770 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vhaddq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
38771 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhaddq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38772 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhaddq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38773 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhaddq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38774 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vhaddq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38775 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vhaddq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38776 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vhaddq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* veorq: polymorphic bitwise exclusive-OR; vector-vector dispatch for
   s/u 8/16/32 element widths.  */
38778 #define __arm_veorq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38779 __typeof(p1) __p1 = (p1); \
38780 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38781 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_veorq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38782 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_veorq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38783 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_veorq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38784 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_veorq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38785 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_veorq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38786 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_veorq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vcaddq_rot90: polymorphic complex add with 90-degree rotation;
   vector-vector dispatch for s/u 8/16/32.  */
38788 #define __arm_vcaddq_rot90(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38789 __typeof(p1) __p1 = (p1); \
38790 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38791 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcaddq_rot90_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38792 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcaddq_rot90_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38793 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcaddq_rot90_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38794 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcaddq_rot90_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38795 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcaddq_rot90_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38796 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcaddq_rot90_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vcaddq_rot270: as vcaddq_rot90 above but with 270-degree rotation;
   vector-vector dispatch for s/u 8/16/32.  */
38798 #define __arm_vcaddq_rot270(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38799 __typeof(p1) __p1 = (p1); \
38800 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38801 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcaddq_rot270_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38802 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcaddq_rot270_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38803 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcaddq_rot270_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38804 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcaddq_rot270_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38805 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcaddq_rot270_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38806 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcaddq_rot270_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vbrsrq: polymorphic bit-reverse-and-shift-right; dispatches on p0's
   vector type only — p1 is the scalar shift argument and is passed
   through unexpanded (no __p1 copy is made here).  */
38808 #define __arm_vbrsrq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38809 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38810 int (*)[__ARM_mve_type_int8x16_t]: __arm_vbrsrq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
38811 int (*)[__ARM_mve_type_int16x8_t]: __arm_vbrsrq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
38812 int (*)[__ARM_mve_type_int32x4_t]: __arm_vbrsrq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
38813 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vbrsrq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
38814 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vbrsrq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
38815 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vbrsrq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* vbicq: polymorphic bit-clear (AND NOT).  Scalar-immediate overloads
   (16/32-bit only, via __ARM_mve_coerce1) plus vector-vector overloads
   for s/u 8/16/32.  Note this macro uses __ARM_mve_coerce1 for the
   immediate, unlike the coerce3-based macros nearby.  */
38817 #define __arm_vbicq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38818 __typeof(p1) __p1 = (p1); \
38819 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38820 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vbicq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce1 (__p1, int)), \
38821 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vbicq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce1 (__p1, int)), \
38822 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vbicq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce1 (__p1, int)), \
38823 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vbicq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce1 (__p1, int)), \
38824 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vbicq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38825 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vbicq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38826 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vbicq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38827 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vbicq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38828 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vbicq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38829 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vbicq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vaddq: polymorphic vector add; vector-vector overloads first, then
   vector-scalar (_n_) overloads using the unexpanded p1 with
   __ARM_mve_coerce3 so constant expressions remain constant.  */
38831 #define __arm_vaddq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38832 __typeof(p1) __p1 = (p1); \
38833 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38834 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vaddq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38835 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vaddq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38836 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vaddq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38837 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vaddq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38838 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vaddq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38839 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vaddq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
38840 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vaddq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
38841 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vaddq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
38842 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vaddq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)), \
38843 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vaddq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
38844 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vaddq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
38845 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vaddq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)));})
/* vandq: polymorphic bitwise AND; vector-vector dispatch for
   s/u 8/16/32 element widths.  */
38847 #define __arm_vandq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38848 __typeof(p1) __p1 = (p1); \
38849 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38850 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vandq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38851 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vandq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38852 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vandq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38853 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vandq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38854 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vandq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38855 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vandq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vabdq: polymorphic absolute difference; vector-vector dispatch for
   s/u 8/16/32 element widths.  */
38857 #define __arm_vabdq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38858 __typeof(p1) __p1 = (p1); \
38859 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38860 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vabdq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38861 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vabdq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38862 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vabdq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38863 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vabdq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38864 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vabdq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38865 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vabdq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vcmpeqq: polymorphic compare-equal producing a predicate; supports
   vector-vector and vector-scalar (_n_, coerce3 with unexpanded p1)
   forms for s/u 8/16/32.  */
38867 #define __arm_vcmpeqq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38868 __typeof(p1) __p1 = (p1); \
38869 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38870 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpeqq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38871 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpeqq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38872 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpeqq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38873 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmpeqq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38874 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmpeqq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38875 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmpeqq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
38876 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
38877 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
38878 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
38879 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
38880 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
38881 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)));})
/* vqmovntq: polymorphic saturating narrow to top half; p0 is the
   narrow destination vector, p1 the wide source — note the mixed
   (narrow, wide) type pairs in the dispatch table.  */
38883 #define __arm_vqmovntq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38884 __typeof(p1) __p1 = (p1); \
38885 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38886 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovntq_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38887 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovntq_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38888 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqmovntq_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38889 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqmovntq_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vqmovnbq: as vqmovntq above but writing the bottom half of the
   narrow destination; same (narrow, wide) dispatch pairs.  */
38891 #define __arm_vqmovnbq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38892 __typeof(p1) __p1 = (p1); \
38893 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38894 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovnbq_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38895 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovnbq_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38896 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqmovnbq_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38897 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqmovnbq_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vmulltq_poly: polymorphic polynomial multiply-long (top);
   polynomial types are carried in unsigned vectors, so the table maps
   u8/u16 pairs to the _p8/_p16 variants.  */
38899 #define __arm_vmulltq_poly(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38900 __typeof(p1) __p1 = (p1); \
38901 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38902 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulltq_poly_p8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38903 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulltq_poly_p16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)));})
/* vmullbq_poly: as vmulltq_poly above but operating on the bottom
   halves; u8/u16 pairs map to the _p8/_p16 variants.  */
38905 #define __arm_vmullbq_poly(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38906 __typeof(p1) __p1 = (p1); \
38907 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38908 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmullbq_poly_p8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
38909 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmullbq_poly_p16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)));})
/* vmovntq: polymorphic narrowing move to top half (non-saturating
   counterpart of vqmovntq); (narrow, wide) dispatch pairs.  */
38911 #define __arm_vmovntq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38912 __typeof(p1) __p1 = (p1); \
38913 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38914 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vmovntq_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38915 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vmovntq_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38916 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vmovntq_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38917 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vmovntq_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vmovnbq: as vmovntq above but writing the bottom half of the narrow
   destination; (narrow, wide) dispatch pairs.  */
38919 #define __arm_vmovnbq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38920 __typeof(p1) __p1 = (p1); \
38921 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38922 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vmovnbq_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38923 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vmovnbq_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38924 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vmovnbq_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
38925 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vmovnbq_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vmlaldavxq: polymorphic multiply-accumulate-long across vector with
   exchange; signed-only (s16/s32) vector-vector dispatch.  */
38927 #define __arm_vmlaldavxq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38928 __typeof(p1) __p1 = (p1); \
38929 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38930 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlaldavxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38931 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlaldavxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vqmovuntq: polymorphic saturating narrow signed-to-unsigned, top
   half; p0 is the unsigned narrow destination, p1 the signed wide
   source.  */
38933 #define __arm_vqmovuntq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38934 __typeof(p1) __p1 = (p1); \
38935 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38936 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovuntq_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38937 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovuntq_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vshlltq: polymorphic shift-left-long, top half; dispatches on p0's
   type only, p1 is the immediate shift count forwarded unexpanded.
   Only 8- and 16-bit sources exist (result is double-width).  */
38939 #define __arm_vshlltq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38940 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38941 int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlltq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
38942 int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlltq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
38943 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlltq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
38944 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlltq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1));})
/* vshllbq: as vshlltq above but operating on the bottom half;
   immediate shift count p1 forwarded unexpanded.  */
38946 #define __arm_vshllbq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38947 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
38948 int (*)[__ARM_mve_type_int8x16_t]: __arm_vshllbq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
38949 int (*)[__ARM_mve_type_int16x8_t]: __arm_vshllbq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
38950 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshllbq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
38951 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshllbq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1));})
/* vqmovunbq: as vqmovuntq above but writing the bottom half; unsigned
   narrow destination, signed wide source.  */
38953 #define __arm_vqmovunbq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38954 __typeof(p1) __p1 = (p1); \
38955 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38956 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovunbq_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38957 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovunbq_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vqdmulltq: polymorphic saturating doubling multiply-long, top half;
   signed-only, with vector-scalar (_n_, unexpanded p1 via coerce3) and
   vector-vector overloads for s16/s32.  */
38959 #define __arm_vqdmulltq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38960 __typeof(p1) __p1 = (p1); \
38961 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38962 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmulltq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
38963 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmulltq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
38964 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmulltq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38965 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmulltq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vqdmullbq: as vqdmulltq above but operating on the bottom half;
   signed-only _n_ and vector-vector overloads for s16/s32.  */
38967 #define __arm_vqdmullbq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38968 __typeof(p1) __p1 = (p1); \
38969 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38970 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmullbq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
38971 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmullbq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)), \
38972 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmullbq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38973 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmullbq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vcmpgeq: polymorphic compare greater-or-equal; signed-only here,
   with vector-vector and vector-scalar (_n_, coerce3/unexpanded p1)
   overloads for s8/s16/s32.  */
38975 #define __arm_vcmpgeq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38976 __typeof(p1) __p1 = (p1); \
38977 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38978 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpgeq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38979 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpgeq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38980 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpgeq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38981 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpgeq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
38982 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpgeq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
38983 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpgeq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)));})
/* vcmpgtq: polymorphic compare greater-than; same signed-only shape
   as vcmpgeq above (vector-vector plus _n_ overloads, s8/s16/s32).  */
38985 #define __arm_vcmpgtq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38986 __typeof(p1) __p1 = (p1); \
38987 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38988 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpgtq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38989 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpgtq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
38990 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpgtq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
38991 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpgtq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
38992 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpgtq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
38993 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpgtq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)));})
/* vcmpleq: polymorphic compare less-or-equal; same signed-only shape
   as vcmpgeq above (vector-vector plus _n_ overloads, s8/s16/s32).  */
38995 #define __arm_vcmpleq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
38996 __typeof(p1) __p1 = (p1); \
38997 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
38998 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpleq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
38999 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpleq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
39000 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpleq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
39001 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpleq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
39002 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpleq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
39003 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpleq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)));})
/* vcmpltq: polymorphic compare less-than; same signed-only shape as
   vcmpgeq above (vector-vector plus _n_ overloads, s8/s16/s32).  */
39005 #define __arm_vcmpltq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
39006 __typeof(p1) __p1 = (p1); \
39007 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
39008 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpltq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
39009 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpltq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
39010 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpltq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
39011 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpltq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int)), \
39012 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpltq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int)), \
39013 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpltq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int)));})
/* Polymorphic predicated vcmpneq_m (compare not-equal, merging):
   signed/unsigned vector/vector and vector/scalar (_n) variants for
   8/16/32-bit elements; p2 is the merging predicate passed through
   unchanged.  NOTE(review): the same macro name is defined again near
   source line 39280 with the cases in a different order -- confirm the
   two definitions are separated by mutually exclusive preprocessor
   guards or an #undef outside this view (C11 6.10.3 otherwise forbids
   redefinition with a different replacement list).  */
#define __arm_vcmpneq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpneq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpneq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpneq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmpneq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmpneq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmpneq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic vshlcq (shift left with carry): dispatches on the type id
   of the vector operand p0 only; p1 and p2 are forwarded unchanged
   (presumably the carry pointer and shift amount -- matches the
   per-type __arm_vshlcq_* signatures; confirm against their
   declarations).  */
#define __arm_vshlcq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlcq_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlcq_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vshlcq_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlcq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlcq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshlcq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2));})
/* Polymorphic predicated vcmpeqq_m (compare equal, merging):
   signed/unsigned vector/vector forms first, then vector/scalar (_n)
   forms for a plain-integer second operand; p2 is the predicate.  */
#define __arm_vcmpeqq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpeqq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpeqq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpeqq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmpeqq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmpeqq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmpeqq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmpeqq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int), p2));})
/* Polymorphic predicated vbicq_m_n (bit clear with immediate):
   16/32-bit signed and unsigned vectors only; p1 (the immediate) and
   p2 (the predicate) are forwarded unchanged.  */
#define __arm_vbicq_m_n(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vbicq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vbicq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vbicq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vbicq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2));})
/* Polymorphic vqrshrnbq (saturating rounding shift right, narrow to
   bottom half): p0 is the narrow destination vector, p1 the wide
   source vector (both selected together via _Generic), p2 the shift
   immediate forwarded unchanged.  */
#define __arm_vqrshrnbq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshrnbq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshrnbq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqrshrnbq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqrshrnbq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic vqrshrunbq (saturating rounding shift right, unsigned
   narrow to bottom half): unsigned narrow destination paired with a
   signed wide source; p2 is the shift immediate.  */
#define __arm_vqrshrunbq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshrunbq_n_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshrunbq_n_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Polymorphic vqrdmlsdhq: three-operand form, all three arguments must
   be the same signed vector type (s8/s16/s32); dispatch is over the
   triple of type ids.  */
#define __arm_vqrdmlsdhq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmlsdhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmlsdhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmlsdhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* Polymorphic vqrdmlsdhxq (exchanged variant of vqrdmlsdhq): same
   dispatch pattern -- all three operands share one signed vector
   type.  */
#define __arm_vqrdmlsdhxq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmlsdhxq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmlsdhxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmlsdhxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* Polymorphic predicated vqrshlq_m_n: dispatch on the vector operand
   p0 only; p1 (shift) and p2 (predicate) are forwarded unchanged.  */
#define __arm_vqrshlq_m_n(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vqrshlq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vqrshlq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vqrshlq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vqrshlq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vqrshlq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vqrshlq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2));})
/* Polymorphic predicated vqshlq_m_r: dispatch on the vector operand
   p0 only; p1 (shift) and p2 (predicate) are forwarded unchanged.  */
#define __arm_vqshlq_m_r(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vqshlq_m_r_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vqshlq_m_r_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vqshlq_m_r_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vqshlq_m_r_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vqshlq_m_r_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vqshlq_m_r_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2));})
/* Polymorphic predicated vrev64q_m: p0 is the inactive-lane source,
   p1 the input vector (must be the same type), p2 the predicate.  */
#define __arm_vrev64q_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrev64q_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrev64q_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrev64q_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrev64q_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrev64q_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrev64q_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic predicated vrshlq_m_n (rounding shift left by register):
   dispatch on the vector operand p0; p1 is the shift amount and p2 the
   predicate.  Fix: the macro captures p1 once into __p1 but five of
   the six branches previously passed raw `p1`, evaluating it a second
   time when p1 has side effects (only the u32 branch used __p1).  All
   branches now use the single-evaluation copy __p1, consistent with
   how __p0 is handled.  */
#define __arm_vrshlq_m_n(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vrshlq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vrshlq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vrshlq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrshlq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrshlq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vrshlq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __p1, p2));})
/* Polymorphic predicated vshlq_m_r: dispatch on the vector operand p0
   only; p1 (shift) and p2 (predicate) are forwarded unchanged.  */
#define __arm_vshlq_m_r(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlq_m_r_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlq_m_r_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vshlq_m_r_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlq_m_r_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlq_m_r_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshlq_m_r_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2));})
/* Polymorphic vsliq (shift left and insert): both vector operands must
   share one type; p2 is the shift immediate forwarded unchanged.  */
#define __arm_vsliq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vsliq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vsliq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsliq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vsliq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vsliq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsliq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic vsriq (shift right and insert): both vector operands must
   share one type; p2 is the shift immediate forwarded unchanged.  */
#define __arm_vsriq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vsriq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vsriq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsriq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vsriq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vsriq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsriq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic vqrdmlashq: two matching signed vectors plus a plain
   integer scalar (matched as __ARM_mve_type_int_n, coerced via
   __ARM_mve_coerce3).  */
#define __arm_vqrdmlashq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqrdmlashq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqrdmlashq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqrdmlashq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int)));})
/* Polymorphic vqdmlashq: same shape as vqrdmlashq -- two matching
   signed vectors plus an integer scalar third operand.  */
#define __arm_vqdmlashq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqdmlashq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmlashq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmlashq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int)));})
/* Polymorphic vqrdmlahq: two matching signed vectors plus an integer
   scalar third operand.  */
#define __arm_vqrdmlahq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqrdmlahq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqrdmlahq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqrdmlahq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int)));})
/* Polymorphic vqrdmladhxq: all three operands must share one signed
   vector type (s8/s16/s32).  */
#define __arm_vqrdmladhxq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmladhxq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmladhxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmladhxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* Polymorphic vqrdmladhq: all three operands must share one signed
   vector type (s8/s16/s32).  */
#define __arm_vqrdmladhq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmladhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmladhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmladhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* Polymorphic predicated vqnegq_m (saturating negate, merging):
   signed vectors only; p0 is the inactive-lane source, p2 the
   predicate.  */
#define __arm_vqnegq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqnegq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqnegq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqnegq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Polymorphic vqdmlsdhxq: all three operands must share one signed
   vector type (s8/s16/s32).  */
#define __arm_vqdmlsdhxq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmlsdhxq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmlsdhxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmlsdhxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* Polymorphic predicated vabsq_m (absolute value, merging): signed
   vectors only; p0 is the inactive-lane source, p2 the predicate.  */
#define __arm_vabsq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vabsq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vabsq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vabsq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Polymorphic predicated vclsq_m (count leading sign bits, merging):
   signed vectors only.  */
#define __arm_vclsq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vclsq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vclsq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vclsq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Polymorphic predicated vclzq_m (count leading zeros, merging):
   signed and unsigned 8/16/32-bit vectors.  */
#define __arm_vclzq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vclzq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vclzq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vclzq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vclzq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vclzq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vclzq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic predicated vcmpgeq_m (compare greater-or-equal, merging):
   signed vector/vector and vector/scalar (_n) variants; p2 is the
   predicate.  */
#define __arm_vcmpgeq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpgeq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpgeq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpgeq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpgeq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpgeq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpgeq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2));})
/* Polymorphic predicated vcmpgtq_m (compare greater-than, merging):
   signed vector/vector and vector/scalar (_n) variants; p2 is the
   predicate.  */
#define __arm_vcmpgtq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpgtq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpgtq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpgtq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpgtq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpgtq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpgtq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2));})
/* Polymorphic predicated vcmpleq_m (compare less-or-equal, merging):
   signed vector/vector and vector/scalar (_n) variants; p2 is the
   predicate.  */
#define __arm_vcmpleq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpleq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpleq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpleq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpleq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpleq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpleq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2));})
/* Polymorphic predicated vcmpltq_m (compare less-than, merging):
   signed vector/vector and vector/scalar (_n) variants; p2 is the
   predicate.  */
#define __arm_vcmpltq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpltq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpltq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpltq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpltq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpltq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpltq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2));})
/* Polymorphic predicated vcmpneq_m (compare not-equal, merging):
   signed/unsigned vector/vector forms first, then vector/scalar (_n)
   forms; p2 is the predicate.  NOTE(review): this macro is also
   defined near source line 39015 with the same cases in a different
   order -- confirm the two definitions are separated by mutually
   exclusive preprocessor guards or an #undef outside this view
   (C11 6.10.3 otherwise forbids redefinition with a different
   replacement list).  */
#define __arm_vcmpneq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcmpneq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcmpneq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcmpneq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmpneq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmpneq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmpneq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmpneq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int), p2));})
/* Polymorphic predicated vdupq_m (duplicate scalar, merging): p0 picks
   the lane type; the scalar __p1 is cast to the matching element type
   before the call; p2 is the predicate.  */
#define __arm_vdupq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vdupq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), (int8_t) __p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vdupq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), (int16_t) __p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vdupq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), (int32_t) __p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vdupq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), (uint8_t) __p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vdupq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), (uint16_t) __p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vdupq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), (uint32_t) __p1, p2));})
/* Predicated vmaxaq: unsigned accumulator (p0) paired with a signed
   operand vector (p1) of the same lane width; p2 is the predicate.  */
#define __arm_vmaxaq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmaxaq_m_s8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmaxaq_m_s16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmaxaq_m_s32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Vector-by-scalar multiply-accumulate: dispatches on the two vector
   operands plus a scalar (p2).  NOTE(review): the scalar is passed to
   __ARM_mve_coerce3 as the raw expression p2 rather than the __p2 copy —
   presumably so a literal argument stays a constant expression; this does
   mean p2 is evaluated twice (once for __p2, once here) — confirm callers
   never pass side-effecting expressions.  */
#define __arm_vmlaq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vmlaq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int)), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmlaq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int)), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmlaq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int)), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vmlaq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int)), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmlaq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int)), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmlaq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int)));})
/* Vector multiply-add-scalar (vmlasq): same dispatch shape as vmlaq —
   two same-type vectors plus an integer scalar selected via the
   __ARM_mve_type_int_n association.  See the note on vmlaq about the raw
   p2 argument to __ARM_mve_coerce3.  */
#define __arm_vmlasq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vmlasq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int)), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmlasq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int)), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmlasq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int)), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vmlasq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int)), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmlasq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int)), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmlasq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int)));})
/* Predicated negate: signed vector types only (negation of unsigned
   lanes has no MVE form); p0 supplies the inactive lanes.  */
#define __arm_vnegq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vnegq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vnegq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vnegq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Predicated lane select between two same-type vectors; the only overload
   family here that also covers the 64-bit element types.  */
#define __arm_vpselq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vpselq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vpselq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vpselq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
int (*)[__ARM_mve_type_int64x2_t][__ARM_mve_type_int64x2_t]: __arm_vpselq_s64 (__ARM_mve_coerce(__p0, int64x2_t), __ARM_mve_coerce(__p1, int64x2_t), p2), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vpselq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vpselq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vpselq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
int (*)[__ARM_mve_type_uint64x2_t][__ARM_mve_type_uint64x2_t]: __arm_vpselq_u64 (__ARM_mve_coerce(__p0, uint64x2_t), __ARM_mve_coerce(__p1, uint64x2_t), p2));})
/* Saturating doubling multiply-accumulate with a scalar: signed lane
   types only.  The scalar p2 goes to __ARM_mve_coerce3 unwrapped (see the
   note on vmlaq).  */
#define __arm_vqdmlahq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqdmlahq_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int)), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmlahq_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int)), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmlahq_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int)));})
/* Saturating doubling multiply-subtract (dual, high half): three
   same-type signed vector operands.  */
#define __arm_vqdmlsdhq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmlsdhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmlsdhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmlsdhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* Saturating doubling multiply-add (dual, exchanged): three same-type
   signed vector operands.  */
#define __arm_vqdmladhxq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmladhxq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmladhxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmladhxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* Saturating doubling multiply-add (dual): three same-type signed
   vector operands.  */
#define __arm_vqdmladhq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmladhq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmladhq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmladhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* Predicated vminaq (min of absolute): unsigned accumulator (p0) with a
   signed operand vector (p1), mirroring the vmaxaq_m table above.  */
#define __arm_vminaq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vminaq_m_s8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vminaq_m_s16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vminaq_m_s32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Predicated widening move (bottom half): result vector p0 has lanes
   twice as wide as source p1, so each association pairs a wide type with
   the matching narrow type.  */
#define __arm_vmovlbq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int8x16_t]: __arm_vmovlbq_m_s8 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int16x8_t]: __arm_vmovlbq_m_s16 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint8x16_t]: __arm_vmovlbq_m_u8 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint16x8_t]: __arm_vmovlbq_m_u16 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint16x8_t), p2));})
/* Predicated narrowing move (bottom half): result p0 is the narrow
   vector, source p1 the wide one — the inverse pairing of vmovlbq_m.  */
#define __arm_vmovnbq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vmovnbq_m_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vmovnbq_m_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vmovnbq_m_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vmovnbq_m_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Predicated narrowing move (top half): same type pairing as
   vmovnbq_m, writing the odd (top) lanes of p0.  */
#define __arm_vmovntq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vmovntq_m_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vmovntq_m_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vmovntq_m_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vmovntq_m_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Narrowing shift right (bottom half).  p2 is the shift immediate and is
   forwarded untouched — not copied into a temporary — presumably so it
   remains an integer constant expression for the builtin.  */
#define __arm_vshrnbq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vshrnbq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vshrnbq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vshrnbq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vshrnbq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Rounding narrowing shift right (bottom half); same dispatch table as
   vshrnbq with rounding variants.  p2 is the shift immediate.  */
#define __arm_vrshrnbq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vrshrnbq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vrshrnbq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vrshrnbq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vrshrnbq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Predicated byte/halfword reversal within 32-bit containers: only 8- and
   16-bit lane types make sense, hence the four associations.  */
#define __arm_vrev32q_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrev32q_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrev32q_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrev32q_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrev32q_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2));})
/* Saturating shift right, narrowing signed to unsigned (top half):
   unsigned destination p0, signed wide source p1, shift immediate p2.  */
#define __arm_vqshruntq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqshruntq_n_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqshruntq_n_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Predicated byte reversal within 16-bit containers: only the 8-bit lane
   types apply.  */
#define __arm_vrev16q_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrev16q_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrev16q_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2));})
/* Saturating narrowing shift right (top half): narrow destination p0,
   wide source p1 of matching signedness, shift immediate p2.  */
#define __arm_vqshrntq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqshrntq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqshrntq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqshrntq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqshrntq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Saturating rounding shift right, narrowing signed to unsigned (top
   half): unsigned destination p0, signed wide source p1, immediate p2.  */
#define __arm_vqrshruntq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshruntq_n_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshruntq_n_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Saturating rounding narrowing shift right (top half): same pairing as
   vqshrntq with rounding variants; p2 is the shift immediate.  */
#define __arm_vqrshrntq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshrntq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshrntq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqrshrntq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqrshrntq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Saturating narrowing shift right (bottom half): same pairing as
   vqshrntq but writing the even (bottom) lanes; p2 is the immediate.  */
#define __arm_vqshrnbq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqshrnbq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqshrnbq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqshrnbq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqshrnbq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Predicated saturating move, narrowing signed to unsigned (top half):
   unsigned narrow destination p0, signed wide source p1, predicate p2.  */
#define __arm_vqmovuntq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovuntq_m_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovuntq_m_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Predicated saturating narrowing move (top half): narrow destination p0
   and wide source p1 of matching signedness.  */
#define __arm_vqmovntq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovntq_m_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovntq_m_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqmovntq_m_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqmovntq_m_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Predicated saturating narrowing move (bottom half): same dispatch
   table as vqmovntq_m, writing the even (bottom) lanes.  */
#define __arm_vqmovnbq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovnbq_m_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovnbq_m_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqmovnbq_m_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqmovnbq_m_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Predicated widening move (top half): wide destination p0, narrow
   source p1 — companion to vmovlbq_m above.  */
#define __arm_vmovltq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int8x16_t]: __arm_vmovltq_m_s8 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int16x8_t]: __arm_vmovltq_m_s16 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint8x16_t]: __arm_vmovltq_m_u8 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint16x8_t]: __arm_vmovltq_m_u16 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint16x8_t), p2));})
/* Predicated saturating move, narrowing signed to unsigned (bottom
   half): companion to vqmovuntq_m above.  */
#define __arm_vqmovunbq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqmovunbq_m_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqmovunbq_m_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Predicated subtract.  Dispatches three ways on p2: the _int_n rows
   select the vector-minus-scalar forms (raw p2 into __ARM_mve_coerce3 —
   see the note on vmlaq), the vector rows select vector-minus-vector.
   p0 is the inactive-lanes vector, p3 the predicate.  */
#define __arm_vsubq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vsubq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vsubq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vsubq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vsubq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vsubq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vsubq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vsubq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vsubq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsubq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vsubq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vsubq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsubq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Predicated absolute-difference-and-accumulate.  Unlike the other
   macros here, the _Generic key uses only the two vector operands
   (p1, p2); the scalar accumulator __p0 is forwarded as-is.  */
#define __arm_vabavq_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vabavq_p_s8(__p0, __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vabavq_p_s16(__p0, __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vabavq_p_s32(__p0, __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vabavq_p_u8(__p0, __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vabavq_p_u16(__p0, __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vabavq_p_u32(__p0, __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Predicated absolute difference: three same-type vector operands
   (inactive-lanes vector p0, sources p1/p2) plus predicate p3.  */
#define __arm_vabdq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vabdq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vabdq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vabdq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vabdq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vabdq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vabdq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Predicated bitwise AND: three same-type vector operands plus
   predicate p3.  */
#define __arm_vandq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vandq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vandq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vandq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vandq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vandq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vandq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Predicated bit clear (AND NOT): three same-type vector operands plus
   predicate p3 — same table shape as vandq_m.  */
#define __arm_vbicq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vbicq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vbicq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vbicq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vbicq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vbicq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vbicq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated vbrsrq_m (_n scalar form).  Only the two vector
   operands participate in _Generic dispatch; the scalar __p2 is passed
   through uncoerced to the selected _n_[su]{8,16,32} variant.  */
#define __arm_vbrsrq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vbrsrq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __p2, p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vbrsrq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __p2, p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vbrsrq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __p2, p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vbrsrq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __p2, p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vbrsrq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __p2, p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vbrsrq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __p2, p3));})
/* Polymorphic predicated vcaddq_rot270_m: three-way _Generic dispatch on the
   typeids of (inactive, a, b) to the matching [su]{8,16,32} variant;
   p3 is the predicate.  */
#define __arm_vcaddq_rot270_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcaddq_rot270_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcaddq_rot270_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcaddq_rot270_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcaddq_rot270_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcaddq_rot270_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcaddq_rot270_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated vcaddq_rot90_m: identical dispatch structure to
   __arm_vcaddq_rot270_m, selecting the _rot90 family instead.  */
#define __arm_vcaddq_rot90_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcaddq_rot90_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcaddq_rot90_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcaddq_rot90_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcaddq_rot90_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcaddq_rot90_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcaddq_rot90_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated veorq_m: _Generic over the three operand typeids,
   forwarding to the matching [su]{8,16,32} variant with predicate p3.  */
#define __arm_veorq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_veorq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_veorq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_veorq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_veorq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_veorq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_veorq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated vmladavaq_p: first operand is a scalar accumulator
   (matched as __ARM_mve_type_int_n and coerced via __ARM_mve_coerce3 on the
   raw p0, so its integer-literal typeid classification is preserved);
   the two vector operands select the [su]{8,16,32} variant.  */
#define __arm_vmladavaq_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmladavaq_p_s8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmladavaq_p_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmladavaq_p_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmladavaq_p_u8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmladavaq_p_u16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmladavaq_p_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated vornq_m: _Generic over the three operand typeids,
   forwarding to the matching [su]{8,16,32} variant with predicate p3.  */
#define __arm_vornq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vornq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vornq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vornq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vornq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vornq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vornq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated vorrq_m: _Generic over the three operand typeids,
   forwarding to the matching [su]{8,16,32} variant with predicate p3.  */
#define __arm_vorrq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vorrq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vorrq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vorrq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vorrq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vorrq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vorrq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated vaddq_m with both vector+scalar (_n, third operand
   matched as __ARM_mve_type_int_n and coerced via __ARM_mve_coerce3 on the
   raw p2) and vector+vector overloads, for each of the six element types.
   The first six associations handle the scalar form, the last six the
   vector form; p3 is the predicate.  */
#define __arm_vaddq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vaddq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vaddq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vaddq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vaddq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vaddq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vaddq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vaddq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vaddq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vaddq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vaddq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vaddq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vaddq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated vmulq_m: same dual dispatch as __arm_vaddq_m —
   scalar (_n via __ARM_mve_coerce3 on raw p2) and vector third-operand
   overloads for each of the six element types; p3 is the predicate.  */
#define __arm_vmulq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vmulq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmulq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmulq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vmulq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmulq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmulq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic vstrwq_scatter_base: only the stored vector p2 is type-
   dispatched (s32 vs u32); p0 and p1 are forwarded unchanged.  */
#define __arm_vstrwq_scatter_base(p0,p1,p2) ({ __typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_base_s32(p0, p1, __ARM_mve_coerce(__p2, int32x4_t)), \
int (*)[__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_base_u32(p0, p1, __ARM_mve_coerce(__p2, uint32x4_t)));})
/* Polymorphic vldrbq_gather_offset: dispatches on the base-pointer type
   (int8_t* vs uint8_t*, typeid taken on the raw p0 so the pointer
   classification is preserved) and the width of the unsigned offset
   vector, selecting the s8/s16/s32 or u8/u16/u32 widening-load variant.  */
#define __arm_vldrbq_gather_offset(p0,p1) ({ __typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint8x16_t]: __arm_vldrbq_gather_offset_s8 (__ARM_mve_coerce1(p0, int8_t *), __ARM_mve_coerce(__p1, uint8x16_t)), \
int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrbq_gather_offset_s16 (__ARM_mve_coerce1(p0, int8_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrbq_gather_offset_s32 (__ARM_mve_coerce1(p0, int8_t *), __ARM_mve_coerce(__p1, uint32x4_t)), \
int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16_t]: __arm_vldrbq_gather_offset_u8 (__ARM_mve_coerce1(p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16_t)), \
int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrbq_gather_offset_u16 (__ARM_mve_coerce1(p0, uint8_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrbq_gather_offset_u32 (__ARM_mve_coerce1(p0, uint8_t *), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Predicated form of __arm_vstrwq_scatter_base: same single-operand
   dispatch on the stored vector p2, with the predicate p3 appended.  */
#define __arm_vstrwq_scatter_base_p(p0,p1,p2,p3) ({ __typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_base_p_s32 (p0, p1, __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_base_p_u32 (p0, p1, __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic vld1q: pure expression (no statement-expression wrapper
   needed — p0 is evaluated once).  Dispatches solely on the pointer type
   of p0 to the matching [su]{8,16,32} contiguous-load variant.  */
#define __arm_vld1q(p0) (\
_Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
int (*)[__ARM_mve_type_int8_t_ptr]: __arm_vld1q_s8 (__ARM_mve_coerce1(p0, int8_t *)), \
int (*)[__ARM_mve_type_int16_t_ptr]: __arm_vld1q_s16 (__ARM_mve_coerce1(p0, int16_t *)), \
int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vld1q_s32 (__ARM_mve_coerce1(p0, int32_t *)), \
int (*)[__ARM_mve_type_uint8_t_ptr]: __arm_vld1q_u8 (__ARM_mve_coerce1(p0, uint8_t *)), \
int (*)[__ARM_mve_type_uint16_t_ptr]: __arm_vld1q_u16 (__ARM_mve_coerce1(p0, uint16_t *)), \
int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vld1q_u32 (__ARM_mve_coerce1(p0, uint32_t *))))
/* Polymorphic vldrhq_gather_offset: dispatches on the halfword base-pointer
   signedness and the offset-vector width (u16x8 or u32x4) to the four
   s16/s32/u16/u32 variants.  */
#define __arm_vldrhq_gather_offset(p0,p1) ({ __typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_offset_s16 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_offset_s32 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t)), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_offset_u16 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_offset_u32 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Zero-predicated (_z) form of __arm_vldrhq_gather_offset: identical
   dispatch with the predicate p2 appended to each call.  */
#define __arm_vldrhq_gather_offset_z(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_offset_z_s16 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_offset_z_s32 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_offset_z_u16 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_offset_z_u32 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic vldrhq_gather_shifted_offset: same dispatch shape as
   __arm_vldrhq_gather_offset, selecting the _shifted_offset variants.  */
#define __arm_vldrhq_gather_shifted_offset(p0,p1) ({ __typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_shifted_offset_s16 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_shifted_offset_s32 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t)), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_shifted_offset_u16 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_shifted_offset_u32 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Zero-predicated (_z) form of __arm_vldrhq_gather_shifted_offset:
   identical dispatch with the predicate p2 appended.  */
#define __arm_vldrhq_gather_shifted_offset_z(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_shifted_offset_z_s16 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_shifted_offset_z_s32 (__ARM_mve_coerce1(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrhq_gather_shifted_offset_z_u16 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrhq_gather_shifted_offset_z_u32 (__ARM_mve_coerce1(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic vldrwq_gather_offset: word loads have only one offset-vector
   width, so dispatch is solely on the base-pointer signedness (s32/u32).  */
#define __arm_vldrwq_gather_offset(p0,p1) ({ __typeof(p0) __p0 = (p0); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vldrwq_gather_offset_s32 (__ARM_mve_coerce1(__p0, int32_t *), p1), \
int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vldrwq_gather_offset_u32 (__ARM_mve_coerce1(__p0, uint32_t *), p1));})
/* Zero-predicated (_z) form of __arm_vldrwq_gather_offset: same pointer-
   signedness dispatch with the predicate p2 appended.  */
#define __arm_vldrwq_gather_offset_z(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vldrwq_gather_offset_z_s32 (__ARM_mve_coerce1(__p0, int32_t *), p1, p2), \
int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vldrwq_gather_offset_z_u32 (__ARM_mve_coerce1(__p0, uint32_t *), p1, p2));})
/* Polymorphic vldrwq_gather_shifted_offset: pointer-signedness dispatch
   (s32/u32), shifted-offset variant.  */
#define __arm_vldrwq_gather_shifted_offset(p0,p1) ({ __typeof(p0) __p0 = (p0); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vldrwq_gather_shifted_offset_s32 (__ARM_mve_coerce1(__p0, int32_t *), p1), \
int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vldrwq_gather_shifted_offset_u32 (__ARM_mve_coerce1(__p0, uint32_t *), p1));})
/* Zero-predicated (_z) form of __arm_vldrwq_gather_shifted_offset: same
   dispatch with the predicate p2 appended.  */
#define __arm_vldrwq_gather_shifted_offset_z(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vldrwq_gather_shifted_offset_z_s32 (__ARM_mve_coerce1(__p0, int32_t *), p1, p2), \
int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vldrwq_gather_shifted_offset_z_u32 (__ARM_mve_coerce1(__p0, uint32_t *), p1, p2));})
/* Polymorphic vst1q: dispatches on the destination-pointer type and the
   stored-vector type together to the matching [su]{8,16,32} variant.
   Note the pointer uses __ARM_mve_coerce (not coerce1) here — the store
   wrappers follow that convention throughout this header.  */
#define __arm_vst1q(p0,p1) ({ __typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int8x16_t]: __arm_vst1q_s8 (__ARM_mve_coerce(p0, int8_t *), __ARM_mve_coerce(__p1, int8x16_t)), \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int16x8_t]: __arm_vst1q_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int16x8_t)), \
int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vst1q_s32 (__ARM_mve_coerce(p0, int32_t *), __ARM_mve_coerce(__p1, int32x4_t)), \
int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16_t]: __arm_vst1q_u8 (__ARM_mve_coerce(p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16_t)), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vst1q_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vst1q_u32 (__ARM_mve_coerce(p0, uint32_t *), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Predicated (_p) form of __arm_vst1q: identical dispatch with the
   predicate p2 appended to each call.  */
#define __arm_vst1q_p(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int8x16_t]: __arm_vst1q_p_s8 (__ARM_mve_coerce(p0, int8_t *), __ARM_mve_coerce(__p1, int8x16_t), p2), \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int16x8_t]: __arm_vst1q_p_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vst1q_p_s32 (__ARM_mve_coerce(p0, int32_t *), __ARM_mve_coerce(__p1, int32x4_t), p2), \
int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16_t]: __arm_vst1q_p_u8 (__ARM_mve_coerce(p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vst1q_p_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vst1q_p_u32 (__ARM_mve_coerce(p0, uint32_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic vst2q: same shape as __arm_vst1q but the stored value is a
   two-vector structure type ([su]{8,16,32}x{16,8,4}x2_t).  */
#define __arm_vst2q(p0,p1) ({ __typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int8x16x2_t]: __arm_vst2q_s8 (__ARM_mve_coerce(p0, int8_t *), __ARM_mve_coerce(__p1, int8x16x2_t)), \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int16x8x2_t]: __arm_vst2q_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int16x8x2_t)), \
int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4x2_t]: __arm_vst2q_s32 (__ARM_mve_coerce(p0, int32_t *), __ARM_mve_coerce(__p1, int32x4x2_t)), \
int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16x2_t]: __arm_vst2q_u8 (__ARM_mve_coerce(p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16x2_t)), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8x2_t]: __arm_vst2q_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8x2_t)), \
int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4x2_t]: __arm_vst2q_u32 (__ARM_mve_coerce(p0, uint32_t *), __ARM_mve_coerce(__p1, uint32x4x2_t)));})
/* Polymorphic vstrhq: halfword store.  A 16-bit base pointer pairs with
   either a matching-width vector (s16/u16) or a 32-bit vector that is
   narrowed on store (s32/u32 variants).  */
#define __arm_vstrhq(p0,p1) ({ __typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int16x8_t]: __arm_vstrhq_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int16x8_t)), \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrhq_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int32x4_t)), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Predicated (_p) form of __arm_vstrhq: identical dispatch with the
   predicate p2 appended.  */
#define __arm_vstrhq_p(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
_Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int16x8_t]: __arm_vstrhq_p_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int16x8_t), p2), \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrhq_p_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, int32x4_t), p2), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_p_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_p_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Predicated vstrhq_scatter_offset_p: dispatches on (base pointer,
   unsigned offset vector, stored vector) jointly; the offset and value
   vectors must share a width, giving four variants.  p3 is the predicate.  */
#define __arm_vstrhq_scatter_offset_p(p0,p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_offset_p_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_offset_p_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_offset_p_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_offset_p_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Unpredicated vstrhq_scatter_offset: same three-way dispatch as the
   _p form above, without the trailing predicate.  */
#define __arm_vstrhq_scatter_offset(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_offset_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_offset_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_offset_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t)), \
int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_offset_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)));})
39824 #define __arm_vstrhq_scatter_shifted_offset_p(p0,p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
39825 __typeof(p2) __p2 = (p2); \
39826 _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
39827 int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_shifted_offset_p_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
39828 int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_shifted_offset_p_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
39829 int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_shifted_offset_p_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
39830 int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_shifted_offset_p_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
39832 #define __arm_vstrhq_scatter_shifted_offset(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
39833 __typeof(p2) __p2 = (p2); \
39834 _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
39835 int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_shifted_offset_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
39836 int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_shifted_offset_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
39837 int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_shifted_offset_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t)), \
39838 int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_shifted_offset_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)));})
/* Contiguous 32-bit vector store: selects __arm_vstrwq_{s32,u32} from the
   pointer type of p0 and the vector type of p1 (copied once into __p1).  */
#define __arm_vstrwq(p0,p1) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrwq_s32 (__ARM_mve_coerce(p0, int32_t *), __ARM_mve_coerce(__p1, int32x4_t)), \
  int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrwq_u32 (__ARM_mve_coerce(p0, uint32_t *), __ARM_mve_coerce(__p1, uint32x4_t)));})

/* Predicated form of the above; p2 (the predicate mask, per the _p naming
   convention) is forwarded unchanged.  */
#define __arm_vstrwq_p(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrwq_p_s32 (__ARM_mve_coerce(p0, int32_t *), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrwq_p_u32 (__ARM_mve_coerce(p0, uint32_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Predicated 64-bit scatter-store with vector base: dispatch depends only
   on the value vector type p2 (int64x2_t vs uint64x2_t); p0 (base vector),
   p1 (immediate offset) and p3 (predicate) are passed straight through.  */
#define __arm_vstrdq_scatter_base_p(p0,p1,p2,p3) ({ __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vstrdq_scatter_base_p_s64 (p0, p1, __ARM_mve_coerce(__p2, int64x2_t), p3), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vstrdq_scatter_base_p_u64 (p0, p1, __ARM_mve_coerce(__p2, uint64x2_t), p3));})

/* Non-predicated variant of the above.  */
#define __arm_vstrdq_scatter_base(p0,p1,p2) ({ __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vstrdq_scatter_base_s64 (p0, p1, __ARM_mve_coerce(__p2, int64x2_t)), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vstrdq_scatter_base_u64 (p0, p1, __ARM_mve_coerce(__p2, uint64x2_t)));})
/* NOTE(review): this and the following three macros repeat definitions that
   appear earlier in the file.  C permits such redefinition only while the
   replacement lists stay token-identical (benign redefinition, C11
   6.10.3p2) — any edit must be mirrored in the other copies.  Presumably
   the copies sit in different preprocessor branches of the full header;
   confirm against the surrounding #if structure.  */
#define __arm_vstrhq_scatter_offset(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_offset_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_offset_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_offset_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_offset_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)));})

/* Predicated scatter-store of 16-bit elements with offset vector; p3 is
   the predicate mask (per _p naming), forwarded unchanged.  */
#define __arm_vstrhq_scatter_offset_p(p0,p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_offset_p_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_offset_p_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_offset_p_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_offset_p_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})

/* Shifted-offset scatter-store of 16-bit elements (non-predicated).  */
#define __arm_vstrhq_scatter_shifted_offset(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_shifted_offset_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_shifted_offset_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_shifted_offset_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_shifted_offset_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)));})

/* Shifted-offset scatter-store of 16-bit elements, predicated by p3.  */
#define __arm_vstrhq_scatter_shifted_offset_p(p0,p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrhq_scatter_shifted_offset_p_s16 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrhq_scatter_shifted_offset_p_s32 (__ARM_mve_coerce(p0, int16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrhq_scatter_shifted_offset_p_u16 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint16_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrhq_scatter_shifted_offset_p_u32 (__ARM_mve_coerce(p0, uint16_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* 32-bit scatter-store with offset vector.  Here the base pointer p0 is the
   one copied to a local (__p0) and the offset vector p1 is expanded
   directly (it appears exactly once per _Generic arm); dispatch is on the
   types of p0 and the value vector p2.  */
#define __arm_vstrwq_scatter_offset(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_offset_s32 (__ARM_mve_coerce(__p0, int32_t *), p1, __ARM_mve_coerce(__p2, int32x4_t)), \
  int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_offset_u32 (__ARM_mve_coerce(__p0, uint32_t *), p1, __ARM_mve_coerce(__p2, uint32x4_t)));})

/* Predicated form of the above; p3 is the predicate mask.  */
#define __arm_vstrwq_scatter_offset_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_offset_p_s32 (__ARM_mve_coerce(__p0, int32_t *), p1, __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_offset_p_u32 (__ARM_mve_coerce(__p0, uint32_t *), p1, __ARM_mve_coerce(__p2, uint32x4_t), p3));})

/* Shifted-offset 32-bit scatter-store.  Note the opposite copying choice
   from the plain offset form above: the offset vector p1 is copied (__p1)
   and the base pointer p0 is expanded directly inside the selected arm.  */
#define __arm_vstrwq_scatter_shifted_offset(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_shifted_offset_s32 (__ARM_mve_coerce(p0, int32_t *), __p1, __ARM_mve_coerce(__p2, int32x4_t)), \
  int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_shifted_offset_u32 (__ARM_mve_coerce(p0, uint32_t *), __p1, __ARM_mve_coerce(__p2, uint32x4_t)));})

/* Predicated form of the shifted-offset store; p3 is the predicate mask.  */
#define __arm_vstrwq_scatter_shifted_offset_p(p0,p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int32_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrwq_scatter_shifted_offset_p_s32 (__ARM_mve_coerce(p0, int32_t *), __p1, __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint32_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrwq_scatter_shifted_offset_p_u32 (__ARM_mve_coerce(p0, uint32_t *), __p1, __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Returns an uninitialized vector whose type matches p0.  p0's value is
   never used by the selected intrinsic — the copy __p0 exists only so its
   type can drive the _Generic selection of the zero-argument
   __arm_vuninitializedq_* overload.  */
#define __arm_vuninitializedq(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vuninitializedq_s8 (), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vuninitializedq_s16 (), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vuninitializedq_s32 (), \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vuninitializedq_s64 (), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vuninitializedq_u8 (), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vuninitializedq_u16 (), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vuninitializedq_u32 (), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vuninitializedq_u64 ());})
/* vreinterpretq family: bit-level reinterpretation of p0 as the vector type
   named in the macro suffix.  Each macro enumerates one _Generic arm per
   source vector type; there is deliberately no arm for the identity
   conversion (e.g. _s16 has no int16x8_t arm), so reinterpreting a vector
   to its own type fails to match and is rejected at compile time.  */

/* Reinterpret p0's bits as int16x8_t.  */
#define __arm_vreinterpretq_s16(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_s16_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_s16_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_s16_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_s16_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_s16_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_s16_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_s16_u64 (__ARM_mve_coerce(__p0, uint64x2_t)));})

/* Reinterpret p0's bits as int32x4_t.  */
#define __arm_vreinterpretq_s32(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_s32_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_s32_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_s32_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_s32_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_s32_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_s32_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_s32_u64 (__ARM_mve_coerce(__p0, uint64x2_t)));})

/* Reinterpret p0's bits as int64x2_t.  */
#define __arm_vreinterpretq_s64(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_s64_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_s64_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_s64_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_s64_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_s64_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_s64_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_s64_u64 (__ARM_mve_coerce(__p0, uint64x2_t)));})

/* Reinterpret p0's bits as int8x16_t.  */
#define __arm_vreinterpretq_s8(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_s8_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_s8_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_s8_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_s8_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_s8_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_s8_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_s8_u64 (__ARM_mve_coerce(__p0, uint64x2_t)));})

/* Reinterpret p0's bits as uint16x8_t.  (Arm ordering here is historical;
   _Generic association order has no semantic effect.)  */
#define __arm_vreinterpretq_u16(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_u16_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_u16_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_u16_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_u16_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_u16_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_u16_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_u16_u64 (__ARM_mve_coerce(__p0, uint64x2_t)));})

/* Reinterpret p0's bits as uint32x4_t.  */
#define __arm_vreinterpretq_u32(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_u32_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_u32_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_u32_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_u32_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_u32_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_u32_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_u32_u64 (__ARM_mve_coerce(__p0, uint64x2_t)));})

/* Reinterpret p0's bits as uint64x2_t.  */
#define __arm_vreinterpretq_u64(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_u64_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_u64_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_u64_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vreinterpretq_u64_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_u64_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_u64_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_u64_s64 (__ARM_mve_coerce(__p0, int64x2_t)));})

/* Reinterpret p0's bits as uint8x16_t.  */
#define __arm_vreinterpretq_u8(p0) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vreinterpretq_u8_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vreinterpretq_u8_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vreinterpretq_u8_s64 (__ARM_mve_coerce(__p0, int64x2_t)), \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vreinterpretq_u8_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vreinterpretq_u8_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vreinterpretq_u8_u32 (__ARM_mve_coerce(__p0, uint32x4_t)), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vreinterpretq_u8_u64 (__ARM_mve_coerce(__p0, uint64x2_t)));})
/* vabsq_x: predicated-with-undefined-false-lanes (_x, per MVE naming)
   absolute value; signed element types only.  p2 (the predicate) is
   forwarded unchanged.  */
#define __arm_vabsq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vabsq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vabsq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vabsq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2));})

/* vaddq_x: _x addition, vector+vector or vector+scalar.  The scalar (_n)
   arms match on __ARM_mve_type_int_n and coerce the original p2 with
   __ARM_mve_coerce3(p2, int) rather than the copied __p2; p3 is the
   predicate, forwarded unchanged.  */
#define __arm_vaddq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vaddq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vaddq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vaddq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vaddq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vaddq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vaddq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vaddq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vaddq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vaddq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vaddq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vaddq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vaddq_x_n_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3));})
/* vcaddq_rot270_x: _x complex add with 270-degree rotation; dispatch on the
   matching signed/unsigned element width of p1/p2, predicate p3 forwarded
   unchanged.  */
#define __arm_vcaddq_rot270_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcaddq_rot270_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcaddq_rot270_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcaddq_rot270_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcaddq_rot270_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcaddq_rot270_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcaddq_rot270_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})

/* vcaddq_rot90_x: as above with 90-degree rotation.  */
#define __arm_vcaddq_rot90_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vcaddq_rot90_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vcaddq_rot90_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vcaddq_rot90_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcaddq_rot90_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcaddq_rot90_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcaddq_rot90_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* veorq_x: _x bitwise exclusive-OR; dispatch on matching vector types of
   p1/p2, predicate p3 forwarded unchanged.  */
#define __arm_veorq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_veorq_x_s8(__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_veorq_x_s16(__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_veorq_x_s32(__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_veorq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_veorq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_veorq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})

/* vmovlbq_x: _x widening move of the bottom (even-indexed) halves; only
   8- and 16-bit element sources have widened forms.  p2 is the predicate.  */
#define __arm_vmovlbq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vmovlbq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vmovlbq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vmovlbq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vmovlbq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2));})

/* vmovltq_x: companion of the above for the top (odd-indexed) halves.  */
#define __arm_vmovltq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vmovltq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vmovltq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vmovltq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vmovltq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2));})
/* vmulhq_x: _x multiply returning the high half of the product; dispatch on
   matching vector types of p1/p2, predicate p3 forwarded unchanged.  */
#define __arm_vmulhq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulhq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulhq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulhq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulhq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulhq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulhq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})

/* vmullbq_int_x: _x widening integer multiply of bottom halves; 8/16/32-bit
   signed and unsigned sources.  */
#define __arm_vmullbq_int_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmullbq_int_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmullbq_int_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmullbq_int_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmullbq_int_x_u8( __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmullbq_int_x_u16( __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmullbq_int_x_u32( __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})

/* vmullbq_poly_x: polynomial widening multiply of bottom halves; only the
   p8/p16 forms exist, matched via the unsigned vector typeids.  */
#define __arm_vmullbq_poly_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmullbq_poly_x_p8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmullbq_poly_x_p16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3));})

/* vmulltq_int_x: top-half companion of vmullbq_int_x.  */
#define __arm_vmulltq_int_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulltq_int_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulltq_int_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulltq_int_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulltq_int_x_u8( __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulltq_int_x_u16( __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulltq_int_x_u32( __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})

/* vmulltq_poly_x: top-half companion of vmullbq_poly_x.  */
#define __arm_vmulltq_poly_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulltq_poly_x_p8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulltq_poly_x_p16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3));})
/* Polymorphic predicated (_x) vmulq: dispatches on the type-ids of both
   operands.  Vector-vector pairs map to the _sN/_uN variants; a vector
   paired with a plain integer (type_int_n) maps to the _n_ scalar variants.
   Note the scalar cases deliberately coerce the raw argument p2 (not the
   __p2 copy) via __ARM_mve_coerce3.  p3 is the predicate.  */
#define __arm_vmulq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vmulq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmulq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmulq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vmulq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmulq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmulq_x_n_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3));})
/* Polymorphic predicated (_x) vnegq: unary dispatch on the vector operand's
   type-id; only signed element types are valid.  p2 is the predicate.  */
#define __arm_vnegq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vnegq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vnegq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vnegq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Polymorphic predicated (_x) vornq: binary dispatch on both vector
   operands' type-ids to the matching _sN/_uN variant; p3 is the
   predicate.  */
#define __arm_vornq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vornq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vornq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vornq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vornq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vornq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vornq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated (_x) vorrq: binary dispatch on both vector
   operands' type-ids to the matching _sN/_uN variant; p3 is the
   predicate.  */
#define __arm_vorrq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vorrq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vorrq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vorrq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vorrq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vorrq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vorrq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated (_x) vrev32q: unary dispatch on the vector
   operand's type-id; only 8- and 16-bit element types are valid (32-bit
   elements cannot be reversed within 32-bit containers).  p2 is the
   predicate.  */
#define __arm_vrev32q_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vrev32q_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vrev32q_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrev32q_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrev32q_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2));})
/* Polymorphic predicated (_x) vrev64q: unary dispatch on the vector
   operand's type-id across all 8/16/32-bit integer element types.
   p2 is the predicate.  */
#define __arm_vrev64q_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vrev64q_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vrev64q_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vrev64q_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrev64q_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrev64q_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vrev64q_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic predicated (_x) vabdq: binary dispatch on both vector
   operands' type-ids to the matching _sN/_uN variant; p3 is the
   predicate.  */
#define __arm_vabdq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vabdq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vabdq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vabdq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vabdq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vabdq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vabdq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated (_x) vandq: binary dispatch on both vector
   operands' type-ids to the matching _sN/_uN variant; p3 is the
   predicate.  */
#define __arm_vandq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vandq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vandq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vandq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vandq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vandq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vandq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated (_x) vbicq: binary dispatch on both vector
   operands' type-ids to the matching _sN/_uN variant; p3 is the
   predicate.  */
#define __arm_vbicq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vbicq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vbicq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vbicq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vbicq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vbicq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vbicq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated (_x) vbrsrq: dispatch on the vector operand's
   type-id to the _n_ variants; the scalar p2 and predicate p3 are passed
   through unchanged.  */
#define __arm_vbrsrq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vbrsrq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vbrsrq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vbrsrq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vbrsrq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vbrsrq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vbrsrq_x_n_u32 (__ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic zero-predicated load vld1q_z: dispatches on the element
   pointer type of p0 (via __ARM_mve_coerce1) to the matching typed load;
   p1 is the predicate.  No statement expression is needed since p0 is
   used only once per selected branch.  */
#define __arm_vld1q_z(p0,p1) ( _Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int8_t_ptr]: __arm_vld1q_z_s8 (__ARM_mve_coerce1(p0, int8_t *), p1), \
  int (*)[__ARM_mve_type_int16_t_ptr]: __arm_vld1q_z_s16 (__ARM_mve_coerce1(p0, int16_t *), p1), \
  int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vld1q_z_s32 (__ARM_mve_coerce1(p0, int32_t *), p1), \
  int (*)[__ARM_mve_type_uint8_t_ptr]: __arm_vld1q_z_u8 (__ARM_mve_coerce1(p0, uint8_t *), p1), \
  int (*)[__ARM_mve_type_uint16_t_ptr]: __arm_vld1q_z_u16 (__ARM_mve_coerce1(p0, uint16_t *), p1), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vld1q_z_u32 (__ARM_mve_coerce1(p0, uint32_t *), p1)))
/* Polymorphic two-structure load vld2q: dispatches on the element pointer
   type of p0 to the matching typed load.  */
#define __arm_vld2q(p0) ( _Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int8_t_ptr]: __arm_vld2q_s8 (__ARM_mve_coerce1(p0, int8_t *)), \
  int (*)[__ARM_mve_type_int16_t_ptr]: __arm_vld2q_s16 (__ARM_mve_coerce1(p0, int16_t *)), \
  int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vld2q_s32 (__ARM_mve_coerce1(p0, int32_t *)), \
  int (*)[__ARM_mve_type_uint8_t_ptr]: __arm_vld2q_u8 (__ARM_mve_coerce1(p0, uint8_t *)), \
  int (*)[__ARM_mve_type_uint16_t_ptr]: __arm_vld2q_u16 (__ARM_mve_coerce1(p0, uint16_t *)), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vld2q_u32 (__ARM_mve_coerce1(p0, uint32_t *))))
/* Polymorphic four-structure load vld4q: dispatches on the element pointer
   type of p0 to the matching typed load.  */
#define __arm_vld4q(p0) ( _Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int8_t_ptr]: __arm_vld4q_s8 (__ARM_mve_coerce1(p0, int8_t *)), \
  int (*)[__ARM_mve_type_int16_t_ptr]: __arm_vld4q_s16 (__ARM_mve_coerce1(p0, int16_t *)), \
  int (*)[__ARM_mve_type_int32_t_ptr]: __arm_vld4q_s32 (__ARM_mve_coerce1(p0, int32_t *)), \
  int (*)[__ARM_mve_type_uint8_t_ptr]: __arm_vld4q_u8 (__ARM_mve_coerce1(p0, uint8_t *)), \
  int (*)[__ARM_mve_type_uint16_t_ptr]: __arm_vld4q_u16 (__ARM_mve_coerce1(p0, uint16_t *)), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vld4q_u32 (__ARM_mve_coerce1(p0, uint32_t *))))
/* Polymorphic predicated (_x) vsubq: dispatches on the type-ids of both
   operands.  Vector-vector pairs map to the _sN/_uN variants; vector plus
   plain integer (type_int_n) maps to the _n_ scalar variants (coercing the
   raw argument p2 via __ARM_mve_coerce3).  p3 is the predicate.  */
#define __arm_vsubq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vsubq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vsubq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsubq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vsubq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vsubq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vsubq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vsubq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vsubq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsubq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vsubq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vsubq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vsubq_x_n_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3));})
/* Polymorphic vgetq_lane: dispatches on the vector operand's type-id
   (including the 64-bit element types) to extract lane p1.  */
#define __arm_vgetq_lane(p0,p1) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vgetq_lane_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vgetq_lane_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vgetq_lane_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vgetq_lane_s64 (__ARM_mve_coerce(__p0, int64x2_t), p1), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vgetq_lane_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vgetq_lane_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vgetq_lane_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vgetq_lane_u64 (__ARM_mve_coerce(__p0, uint64x2_t), p1));})
/* Polymorphic vsetq_lane: the scalar p0 always matches type_int_n, so the
   dispatch is effectively on the destination vector __p1's type-id
   (including the 64-bit element types).  p2 is the lane index.  Note the
   raw argument p0 is coerced via __ARM_mve_coerce3, not the __p0 copy.  */
#define __arm_vsetq_lane(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t]: __arm_vsetq_lane_s8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t]: __arm_vsetq_lane_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vsetq_lane_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int64x2_t]: __arm_vsetq_lane_s64 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int64x2_t), p2), \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint8x16_t]: __arm_vsetq_lane_u8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t]: __arm_vsetq_lane_u16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t]: __arm_vsetq_lane_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint64x2_t]: __arm_vsetq_lane_u64 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint64x2_t), p2));})
40277 #endif /* MVE Integer. */
/* Polymorphic vshrntq: narrowing dispatch — the first operand is the
   narrow destination vector, the second the wide source vector; the pair
   of type-ids selects the _n_s16/_n_s32/_n_u16/_n_u32 variant.  p2 is
   the shift amount.  */
#define __arm_vshrntq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vshrntq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vshrntq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vshrntq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vshrntq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic vrshrntq: narrowing dispatch — narrow destination first,
   wide source second; the type-id pair selects the _n_ variant.  p2 is
   the shift amount.  */
#define __arm_vrshrntq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vrshrntq_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vrshrntq_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vrshrntq_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vrshrntq_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic predicated (_x) vmvnq: unary dispatch on the vector
   operand's type-id; p2 is the predicate.  */
#define __arm_vmvnq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vmvnq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vmvnq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vmvnq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vmvnq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vmvnq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vmvnq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Polymorphic predicated (_x) vrev16q: unary dispatch on the vector
   operand's type-id; only 8-bit element types are valid.  p2 is the
   predicate.  */
#define __arm_vrev16q_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vrev16q_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrev16q_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2));})
/* Polymorphic predicated (_x) vrhaddq: binary dispatch on both vector
   operands' type-ids to the matching _sN/_uN variant; p3 is the
   predicate.  */
#define __arm_vrhaddq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrhaddq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrhaddq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrhaddq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrhaddq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrhaddq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrhaddq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated (_x) vshlq: dispatch on both operands' type-ids.
   The shift-count vector (second operand) is always a SIGNED vector of the
   same element width, even for the unsigned-value variants — hence the
   [uintN][intN] case entries.  p3 is the predicate.  */
#define __arm_vshlq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vshlq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vshlq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vshlq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vshlq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vshlq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vshlq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic predicated (_x) vrmulhq: binary dispatch on both vector
   operands' type-ids to the matching _sN/_uN variant; p3 is the
   predicate.  */
#define __arm_vrmulhq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrmulhq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrmulhq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrmulhq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrmulhq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrmulhq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrmulhq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic predicated (_x) vrshlq: like vshlq_x, the shift-count vector
   (second operand) is always a SIGNED vector of matching element width,
   including for the unsigned-value variants.  p3 is the predicate.  */
#define __arm_vrshlq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrshlq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrshlq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrshlq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrshlq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrshlq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrshlq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic predicated (_x) vrshrq: dispatch on the vector operand's
   type-id to the _n_ (immediate-shift) variants; p2 is the shift
   immediate, p3 the predicate.  */
#define __arm_vrshrq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vrshrq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vrshrq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vrshrq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vrshrq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vrshrq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vrshrq_x_n_u32 (__ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic predicated (_x) vshllbq: widening dispatch on the narrow
   source vector's type-id (8- and 16-bit elements only); p2 is the shift
   immediate, p3 the predicate.  */
#define __arm_vshllbq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vshllbq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vshllbq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshllbq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshllbq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2, p3));})
/* Polymorphic predicated (_x) vshlltq: widening dispatch on the narrow
   source vector's type-id (8- and 16-bit elements only); p2 is the shift
   immediate, p3 the predicate.  */
#define __arm_vshlltq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlltq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlltq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlltq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlltq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2, p3));})
/* Polymorphic predicated (_x) vshlq with immediate shift count: dispatch
   on the vector operand's type-id; p2 is the shift immediate, p3 the
   predicate.  */
#define __arm_vshlq_x_n(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vshlq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshlq_x_n_u32 (__ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic predicated (_x) vdwdupq, per element width: the start value
   may be a plain integer (type_int_n -> _n_ variant, cast to uint32_t) or
   a uint32_t pointer (-> _wb_ write-back variant).  p2..p4 are forwarded
   unchanged.  */
#define __arm_vdwdupq_x_u8(p1,p2,p3,p4) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n]: __arm_vdwdupq_x_n_u8 ((uint32_t) __p1, p2, p3, p4), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vdwdupq_x_wb_u8 (__ARM_mve_coerce(__p1, uint32_t *), p2, p3, p4));})

/* As above, for 16-bit elements.  */
#define __arm_vdwdupq_x_u16(p1,p2,p3,p4) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n]: __arm_vdwdupq_x_n_u16 ((uint32_t) __p1, p2, p3, p4), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vdwdupq_x_wb_u16 (__ARM_mve_coerce(__p1, uint32_t *), p2, p3, p4));})

/* As above, for 32-bit elements.  */
#define __arm_vdwdupq_x_u32(p1,p2,p3,p4) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n]: __arm_vdwdupq_x_n_u32 ((uint32_t) __p1, p2, p3, p4), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vdwdupq_x_wb_u32 (__ARM_mve_coerce(__p1, uint32_t *), p2, p3, p4));})
/* Polymorphic predicated (_x) viwdupq, per element width: start value is
   either a plain integer (_n_ variant, cast to uint32_t) or a uint32_t
   pointer (_wb_ write-back variant).  p2..p4 are forwarded unchanged.  */
#define __arm_viwdupq_x_u8(p1,p2,p3,p4) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n]: __arm_viwdupq_x_n_u8 ((uint32_t) __p1, p2, p3, p4), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_viwdupq_x_wb_u8 (__ARM_mve_coerce(__p1, uint32_t *), p2, p3, p4));})

/* As above, for 16-bit elements.  */
#define __arm_viwdupq_x_u16(p1,p2,p3,p4) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n]: __arm_viwdupq_x_n_u16 ((uint32_t) __p1, p2, p3, p4), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_viwdupq_x_wb_u16 (__ARM_mve_coerce(__p1, uint32_t *), p2, p3, p4));})

/* As above, for 32-bit elements.  */
#define __arm_viwdupq_x_u32(p1,p2,p3,p4) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n]: __arm_viwdupq_x_n_u32 ((uint32_t) __p1, p2, p3, p4), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_viwdupq_x_wb_u32 (__ARM_mve_coerce(__p1, uint32_t *), p2, p3, p4));})
/* Polymorphic predicated (_x) vidupq/vddupq, per element width: start
   value is either a plain integer (_n_ variant, cast to uint32_t) or a
   uint32_t pointer (_wb_ write-back variant).  p2 and p3 are forwarded
   unchanged.  */
#define __arm_vidupq_x_u8(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n]: __arm_vidupq_x_n_u8 ((uint32_t) __p1, p2, p3), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vidupq_x_wb_u8 (__ARM_mve_coerce(__p1, uint32_t *), p2, p3));})

/* Decrementing counterpart, 8-bit elements.  */
#define __arm_vddupq_x_u8(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n]: __arm_vddupq_x_n_u8 ((uint32_t) __p1, p2, p3), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vddupq_x_wb_u8 (__ARM_mve_coerce(__p1, uint32_t *), p2, p3));})

/* Incrementing, 16-bit elements.  */
#define __arm_vidupq_x_u16(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n]: __arm_vidupq_x_n_u16 ((uint32_t) __p1, p2, p3), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vidupq_x_wb_u16 (__ARM_mve_coerce(__p1, uint32_t *), p2, p3));})

/* Decrementing, 16-bit elements.  */
#define __arm_vddupq_x_u16(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n]: __arm_vddupq_x_n_u16 ((uint32_t) __p1, p2, p3), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vddupq_x_wb_u16 (__ARM_mve_coerce(__p1, uint32_t *), p2, p3));})

/* Incrementing, 32-bit elements.  */
#define __arm_vidupq_x_u32(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n]: __arm_vidupq_x_n_u32 ((uint32_t) __p1, p2, p3), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vidupq_x_wb_u32 (__ARM_mve_coerce(__p1, uint32_t *), p2, p3));})

/* Decrementing, 32-bit elements.  */
#define __arm_vddupq_x_u32(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int_n]: __arm_vddupq_x_n_u32 ((uint32_t) __p1, p2, p3), \
  int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vddupq_x_wb_u32 (__ARM_mve_coerce(__p1, uint32_t *), p2, p3));})
/* Polymorphic predicated (_x) vshrq: dispatch on the vector operand's
   type-id to the _n_ (immediate-shift) variants; p2 is the shift
   immediate, p3 the predicate.  */
#define __arm_vshrq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vshrq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vshrq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vshrq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshrq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshrq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshrq_x_n_u32 (__ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic predicated (_x) vhaddq: dispatches on both operands'
   type-ids.  Vector plus plain integer (type_int_n) maps to the _n_
   scalar variants (listed first; coercing the raw argument p2 via
   __ARM_mve_coerce3); vector-vector pairs map to the _sN/_uN variants.
   p3 is the predicate.  Case order does not affect _Generic selection.  */
#define __arm_vhaddq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vhaddq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vhaddq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vhaddq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vhaddq_x_n_u8( __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vhaddq_x_n_u16( __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vhaddq_x_n_u32( __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhaddq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhaddq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhaddq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vhaddq_x_u8( __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vhaddq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vhaddq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Overload resolver for vhcaddq_rot270_x: signed-vector-only dispatch on
   (typeof P1, typeof P2); P3 (predicate) is forwarded unchanged.  */
#define __arm_vhcaddq_rot270_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhcaddq_rot270_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhcaddq_rot270_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhcaddq_rot270_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Overload resolver for vhcaddq_rot90_x: signed-vector-only dispatch on
   (typeof P1, typeof P2); P3 (predicate) is forwarded unchanged.  */
#define __arm_vhcaddq_rot90_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhcaddq_rot90_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhcaddq_rot90_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhcaddq_rot90_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Overload resolver for vhsubq_x: dispatches on (typeof P1, typeof P2);
   scalar P2 (int_n) selects the _n_ variants (raw P2 via
   __ARM_mve_coerce3), vector P2 selects the vector variants.  P3
   (predicate) is forwarded unchanged.  */
#define __arm_vhsubq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vhsubq_x_n_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vhsubq_x_n_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vhsubq_x_n_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vhsubq_x_n_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vhsubq_x_n_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vhsubq_x_n_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhsubq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhsubq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhsubq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vhsubq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vhsubq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vhsubq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Overload resolver for vclsq_x: signed-vector-only dispatch on the type
   of P1; P2 (predicate) is forwarded unchanged.  */
#define __arm_vclsq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vclsq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vclsq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vclsq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Overload resolver for vclzq_x: dispatches on the type of P1 across all
   six signed/unsigned element widths; P2 (predicate) is forwarded
   unchanged.  */
#define __arm_vclzq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vclzq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vclzq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vclzq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vclzq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vclzq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vclzq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Overload resolver for vadciq: 32-bit-only dispatch on (typeof P0,
   typeof P1); P2 (carry-out pointer, per the _s32/_u32 callees' third
   argument) is forwarded unchanged.  */
#define __arm_vadciq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vadciq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vadciq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Overload resolver for vstrdq_scatter_base_wb_p: dispatches on the type
   of the value vector P2 (int64x2_t vs uint64x2_t); P0, P1 and P3 are
   forwarded unchanged.  */
#define __arm_vstrdq_scatter_base_wb_p(p0,p1,p2,p3) ({ __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vstrdq_scatter_base_wb_p_s64 (p0, p1, __ARM_mve_coerce(__p2, int64x2_t), p3), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vstrdq_scatter_base_wb_p_u64 (p0, p1, __ARM_mve_coerce(__p2, uint64x2_t), p3));})
/* Overload resolver for vstrdq_scatter_base_wb: unpredicated counterpart
   of the _p variant above; dispatches on the type of the value vector
   P2.  */
#define __arm_vstrdq_scatter_base_wb(p0,p1,p2) ({ __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int64x2_t]: __arm_vstrdq_scatter_base_wb_s64 (p0, p1, __ARM_mve_coerce(__p2, int64x2_t)), \
  int (*)[__ARM_mve_type_uint64x2_t]: __arm_vstrdq_scatter_base_wb_u64 (p0, p1, __ARM_mve_coerce(__p2, uint64x2_t)));})
/* Overload resolver for vldrdq_gather_offset: dispatches on the pointer
   type of P0 (int64_t * vs uint64_t * via __ARM_mve_coerce1).  No
   statement expression is needed here since P0 is used only once per
   selected branch.  */
#define __arm_vldrdq_gather_offset(p0,p1) ( _Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int64_t_ptr]: __arm_vldrdq_gather_offset_s64 (__ARM_mve_coerce1(p0, int64_t *), p1), \
  int (*)[__ARM_mve_type_uint64_t_ptr]: __arm_vldrdq_gather_offset_u64 (__ARM_mve_coerce1(p0, uint64_t *), p1)))
/* Overload resolver for vldrdq_gather_offset_z (zero-predicated form):
   dispatches on the pointer type of P0; P1 and P2 forwarded unchanged.  */
#define __arm_vldrdq_gather_offset_z(p0,p1,p2) ( _Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int64_t_ptr]: __arm_vldrdq_gather_offset_z_s64 (__ARM_mve_coerce1(p0, int64_t *), p1, p2), \
  int (*)[__ARM_mve_type_uint64_t_ptr]: __arm_vldrdq_gather_offset_z_u64 (__ARM_mve_coerce1(p0, uint64_t *), p1, p2)))
/* Overload resolver for vldrdq_gather_shifted_offset: dispatches on the
   pointer type of P0; P1 forwarded unchanged.  */
#define __arm_vldrdq_gather_shifted_offset(p0,p1) ( _Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int64_t_ptr]: __arm_vldrdq_gather_shifted_offset_s64 (__ARM_mve_coerce1(p0, int64_t *), p1), \
  int (*)[__ARM_mve_type_uint64_t_ptr]: __arm_vldrdq_gather_shifted_offset_u64 (__ARM_mve_coerce1(p0, uint64_t *), p1)))
/* Overload resolver for vldrdq_gather_shifted_offset_z (zero-predicated):
   dispatches on the pointer type of P0; P1 and P2 forwarded unchanged.  */
#define __arm_vldrdq_gather_shifted_offset_z(p0,p1,p2) ( _Generic( (int (*)[__ARM_mve_typeid(p0)])0, \
  int (*)[__ARM_mve_type_int64_t_ptr]: __arm_vldrdq_gather_shifted_offset_z_s64 (__ARM_mve_coerce1(p0, int64_t *), p1, p2), \
  int (*)[__ARM_mve_type_uint64_t_ptr]: __arm_vldrdq_gather_shifted_offset_z_u64 (__ARM_mve_coerce1(p0, uint64_t *), p1, p2)))
/* Overload resolver for vadciq_m (merging-predicated vadciq): 32-bit-only
   dispatch on (typeof P0, typeof P1, typeof P2); P3 and P4 forwarded
   unchanged.  */
#define __arm_vadciq_m(p0,p1,p2,p3,p4) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vadciq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3, p4), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vadciq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3, p4));})
/* A second, token-identical definition of __arm_vadciq previously appeared
   here.  It duplicated the definition given earlier in this file; such an
   identical redefinition is accepted by the preprocessor but is redundant
   and would become a hard error the moment either copy diverged, so the
   duplicate has been removed.  */
/* Overload resolver for vadcq_m (merging-predicated add-with-carry):
   32-bit-only dispatch on (typeof P0, typeof P1, typeof P2); P3 and P4
   forwarded unchanged.  */
#define __arm_vadcq_m(p0,p1,p2,p3,p4) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vadcq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3, p4), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vadcq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3, p4));})
/* Overload resolver for vadcq: 32-bit-only dispatch on (typeof P0,
   typeof P1); P2 forwarded unchanged.  */
#define __arm_vadcq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vadcq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vadcq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Overload resolver for vsbciq_m: 32-bit-only dispatch on (typeof P0,
   typeof P1, typeof P2); P3 and P4 forwarded unchanged.  */
#define __arm_vsbciq_m(p0,p1,p2,p3,p4) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsbciq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3, p4), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsbciq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3, p4));})
/* Overload resolver for vsbciq: 32-bit-only dispatch on (typeof P0,
   typeof P1); P2 forwarded unchanged.  */
#define __arm_vsbciq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsbciq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsbciq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Overload resolver for vsbcq_m: 32-bit-only dispatch on (typeof P0,
   typeof P1, typeof P2); P3 and P4 forwarded unchanged.  */
#define __arm_vsbcq_m(p0,p1,p2,p3,p4) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsbcq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3, p4), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsbcq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3, p4));})
/* Overload resolver for vsbcq: 32-bit-only dispatch on (typeof P0,
   typeof P1); P2 forwarded unchanged.  */
#define __arm_vsbcq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsbcq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsbcq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Overload resolver for vldrbq_gather_offset_z: dispatches on the pair
   (pointer type of P0, vector type of P1).  Only P1 is captured; P0 is
   used directly with __ARM_mve_coerce1 in each branch.  P2 (predicate)
   is forwarded unchanged.  */
#define __arm_vldrbq_gather_offset_z(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint8x16_t]: __arm_vldrbq_gather_offset_z_s8 (__ARM_mve_coerce1(p0, int8_t *), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrbq_gather_offset_z_s16 (__ARM_mve_coerce1(p0, int8_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrbq_gather_offset_z_s32 (__ARM_mve_coerce1(p0, int8_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16_t]: __arm_vldrbq_gather_offset_z_u8 (__ARM_mve_coerce1(p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrbq_gather_offset_z_u16 (__ARM_mve_coerce1(p0, uint8_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrbq_gather_offset_z_u32 (__ARM_mve_coerce1(p0, uint8_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* Overload resolver for vqrdmlahq_m: signed-only dispatch on (typeof P0,
   typeof P1, typeof P2) where P2 must be a scalar (int_n, raw P2 via
   __ARM_mve_coerce3); P3 (predicate) is forwarded unchanged.  */
#define __arm_vqrdmlahq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqrdmlahq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqrdmlahq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqrdmlahq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3));})
/* Overload resolver for vqrdmlashq_m: signed-only, scalar-P2 dispatch,
   mirroring __arm_vqrdmlahq_m above; P3 (predicate) forwarded
   unchanged.  */
#define __arm_vqrdmlashq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqrdmlashq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqrdmlashq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqrdmlashq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3));})
/* Overload resolver for vqdmlashq_m: signed-only, scalar-P2 dispatch,
   same shape as __arm_vqrdmlahq_m; P3 (predicate) forwarded unchanged.  */
#define __arm_vqdmlashq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqdmlashq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmlashq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmlashq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3));})
/* Overload resolver for vqrshlq_m: dispatches on (typeof P0, typeof P1,
   typeof P2).  Note the shift-count vector P2 is always a *signed*
   vector, even for the unsigned-operand variants; P3 (predicate) is
   forwarded unchanged.  */
#define __arm_vqrshlq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrshlq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrshlq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrshlq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrshlq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrshlq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrshlq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Overload resolver for vqshlq_m_n (immediate-shift form): dispatches on
   (typeof P0, typeof P1); P2 (shift immediate) and P3 (predicate) are
   forwarded unchanged.  */
#define __arm_vqshlq_m_n(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqshlq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqshlq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqshlq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vqshlq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vqshlq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vqshlq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Overload resolver for vqshlq_m (vector-shift form): dispatches on
   (typeof P0, typeof P1, typeof P2); as with vqrshlq_m, the shift vector
   P2 is signed even for the unsigned variants.  P3 (predicate) is
   forwarded unchanged.  */
#define __arm_vqshlq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqshlq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqshlq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqshlq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqshlq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqshlq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqshlq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Overload resolver for vrhaddq_m: dispatches on (typeof P0, typeof P1,
   typeof P2), all three vectors of the same type; P3 (predicate) is
   forwarded unchanged.  */
#define __arm_vrhaddq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrhaddq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrhaddq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrhaddq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrhaddq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrhaddq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrhaddq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Overload resolver for vrmulhq_m: same three-way same-type dispatch
   shape as vrhaddq_m above; P3 (predicate) forwarded unchanged.  */
#define __arm_vrmulhq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrmulhq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrmulhq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrmulhq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrmulhq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrmulhq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrmulhq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Overload resolver for vrshlq_m: dispatches on (typeof P0, typeof P1,
   typeof P2); like the other shift-by-vector forms, the shift vector P2
   is signed even for unsigned operands.  P3 (predicate) forwarded
   unchanged.  */
#define __arm_vrshlq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrshlq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrshlq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrshlq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrshlq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrshlq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrshlq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Overload resolver for vrshrq_m: dispatches on (typeof P0, typeof P1)
   to the _n_ immediate-shift variants; P2 (shift immediate) and P3
   (predicate) are forwarded unchanged.  */
#define __arm_vrshrq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vrshrq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vrshrq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrshrq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vrshrq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vrshrq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrshrq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Overload resolver for vshrq_m: dispatches on (typeof P0, typeof P1) to
   the _n_ immediate-shift variants; P2 (shift immediate) and P3
   (predicate) are forwarded unchanged.  */
#define __arm_vshrq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vshrq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vshrq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vshrq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vshrq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vshrq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vshrq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Overload resolver for vsliq_m: dispatches on (typeof P0, typeof P1) to
   the _n_ immediate variants; P2 (immediate) and P3 (predicate) are
   forwarded unchanged.  */
#define __arm_vsliq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vsliq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vsliq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsliq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vsliq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vsliq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsliq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Overload resolver for vqsubq_m: dispatches on (typeof P0, typeof P1,
   typeof P2); scalar P2 (int_n) selects the _n_ variants (raw P2 via
   __ARM_mve_coerce3), vector P2 the vector variants.  P3 (predicate) is
   forwarded unchanged.  */
#define __arm_vqsubq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqsubq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqsubq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqsubq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vqsubq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vqsubq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vqsubq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqsubq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqsubq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqsubq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vqsubq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vqsubq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vqsubq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic vqrdmulhq_m: _Generic keys on the MVE type ids of the first
   three arguments.  Vector/vector rows select the s8/s16/s32 variants;
   __ARM_mve_type_int_n rows select the scalar (_n) variants.  p3 (the
   predicate) is passed through unchanged.  No default association, so any
   other type combination is a compile-time error.  */
#define __arm_vqrdmulhq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmulhq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmulhq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmulhq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqrdmulhq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqrdmulhq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqrdmulhq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3));})
/* Polymorphic vqrdmlsdhxq_m: signed-only three-vector form; _Generic on the
   type ids of p0/p1/p2 selects the s8/s16/s32 variant; p3 is the predicate.  */
#define __arm_vqrdmlsdhxq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmlsdhxq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmlsdhxq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmlsdhxq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic vqrdmlsdhq_m: signed-only three-vector form; _Generic on the
   type ids of p0/p1/p2 selects the s8/s16/s32 variant; p3 is the predicate.  */
#define __arm_vqrdmlsdhq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmlsdhq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmlsdhq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmlsdhq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic vshllbq_m: widening form — the result (p0) is the next-wider
   element type than the source (p1), so s8->s16, s16->s32 and the unsigned
   pairs.  _Generic dispatches on both; p2 and p3 pass through unchanged.  */
#define __arm_vshllbq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int8x16_t]: __arm_vshllbq_m_n_s8 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int16x8_t]: __arm_vshllbq_m_n_s16 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint8x16_t]: __arm_vshllbq_m_n_u8 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint16x8_t]: __arm_vshllbq_m_n_u16 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3));})
/* Polymorphic vshrntq_m: narrowing form — the result (p0) has the
   next-narrower element type than the source (p1): s16->s8, s32->s16 and the
   unsigned pairs.  p2 and p3 pass through unchanged.  */
#define __arm_vshrntq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vshrntq_m_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vshrntq_m_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vshrntq_m_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vshrntq_m_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic vshrnbq_m: narrowing form, same dispatch shape as vshrntq_m —
   result (p0) is one element size narrower than source (p1); p2/p3 pass
   through unchanged.  */
#define __arm_vshrnbq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vshrnbq_m_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vshrnbq_m_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vshrnbq_m_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vshrnbq_m_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic vshlltq_m: widening form, same dispatch shape as vshllbq_m —
   result (p0) is one element size wider than source (p1); p2/p3 pass through
   unchanged.  */
#define __arm_vshlltq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int8x16_t]: __arm_vshlltq_m_n_s8 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int16x8_t]: __arm_vshlltq_m_n_s16 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint8x16_t]: __arm_vshlltq_m_n_u8 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint16x8_t]: __arm_vshlltq_m_n_u16 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3));})
/* Polymorphic vrshrntq_m: narrowing dispatch on (p0, p1) — result one element
   size narrower than source, signed and unsigned pairs; p2/p3 pass through.  */
#define __arm_vrshrntq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vrshrntq_m_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vrshrntq_m_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vrshrntq_m_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vrshrntq_m_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic vqshruntq_m: signed-to-unsigned narrowing — p0 is the unsigned
   narrow destination, p1 the signed wide source; only s16/s32 sources exist.  */
#define __arm_vqshruntq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqshruntq_m_n_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqshruntq_m_n_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3));})
/* Polymorphic vqshrunbq_m: signed-to-unsigned narrowing, same dispatch shape
   as vqshruntq_m (unsigned narrow p0, signed wide p1; s16/s32 only).  */
#define __arm_vqshrunbq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqshrunbq_m_n_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqshrunbq_m_n_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3));})
/* Polymorphic vqrshrnbq_m: narrowing dispatch on (p0, p1) — signed and
   unsigned same-signedness pairs, wide source to narrow destination.  */
#define __arm_vqrshrnbq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshrnbq_m_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshrnbq_m_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqrshrnbq_m_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqrshrnbq_m_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic vqrshrntq_m: narrowing dispatch on (p0, p1), same shape as
   vqrshrnbq_m.  */
#define __arm_vqrshrntq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshrntq_m_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshrntq_m_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqrshrntq_m_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqrshrntq_m_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic vqrshrunbq_m: signed-to-unsigned narrowing (unsigned narrow p0,
   signed wide p1; s16/s32 sources only).  */
#define __arm_vqrshrunbq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshrunbq_m_n_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshrunbq_m_n_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3));})
/* Polymorphic vqrshruntq_m: signed-to-unsigned narrowing, same dispatch shape
   as vqrshrunbq_m.  */
#define __arm_vqrshruntq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqrshruntq_m_n_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqrshruntq_m_n_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3));})
/* Polymorphic vqshrnbq_m: narrowing dispatch on (p0, p1) — signed and
   unsigned same-signedness pairs; p2/p3 pass through unchanged.  */
#define __arm_vqshrnbq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqshrnbq_m_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqshrnbq_m_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqshrnbq_m_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqshrnbq_m_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic vqshrntq_m: narrowing dispatch on (p0, p1), same shape as
   vqshrnbq_m.  */
#define __arm_vqshrntq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqshrntq_m_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqshrntq_m_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vqshrntq_m_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vqshrntq_m_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic vrshrnbq_m: narrowing dispatch on (p0, p1), same shape as
   vrshrntq_m.  */
#define __arm_vrshrnbq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int16x8_t]: __arm_vrshrnbq_m_n_s16 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int32x4_t]: __arm_vrshrnbq_m_n_s32 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint16x8_t]: __arm_vrshrnbq_m_n_u16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32x4_t]: __arm_vrshrnbq_m_n_u32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic vmlaldavaq_p: p0 is a scalar accumulator (int_n row, coerced
   via __ARM_mve_coerce3 on the original expression p0), p1/p2 are the vector
   operands; s16/s32 and u16/u32 variants.  */
#define __arm_vmlaldavaq_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlaldavaq_p_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlaldavaq_p_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmlaldavaq_p_u16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmlaldavaq_p_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic vmlaldavaxq_p: like vmlaldavaq_p (scalar accumulator p0 plus
   two vectors) but signed-only (s16/s32).  */
#define __arm_vmlaldavaxq_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlaldavaxq_p_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlaldavaxq_p_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic vmlsldavaq_p: dispatches only on the vector operands p1/p2
   (signed-only); the accumulator __p0 is passed through uncoerced.  */
#define __arm_vmlsldavaq_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsldavaq_p_s16(__p0, __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsldavaq_p_s32(__p0, __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic vmlsldavaxq_p: same dispatch shape as vmlsldavaq_p — keyed on
   p1/p2 only, accumulator __p0 passed through uncoerced.  */
#define __arm_vmlsldavaxq_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsldavaxq_p_s16(__p0, __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsldavaxq_p_s32(__p0, __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* These three predicated long-reduction wrappers have a single (s32) form, so
   they forward directly to the type-specific intrinsic with no _Generic
   dispatch.  */
#define __arm_vrmlaldavhaxq_p(p0,p1,p2,p3) __arm_vrmlaldavhaxq_p_s32(p0,p1,p2,p3)

#define __arm_vrmlsldavhaq_p(p0,p1,p2,p3) __arm_vrmlsldavhaq_p_s32(p0,p1,p2,p3)

#define __arm_vrmlsldavhaxq_p(p0,p1,p2,p3) __arm_vrmlsldavhaxq_p_s32(p0,p1,p2,p3)
/* Polymorphic vqdmladhq_m: signed-only three-vector form (s8/s16/s32); p3 is
   the predicate, passed through unchanged.  */
#define __arm_vqdmladhq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmladhq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmladhq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmladhq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic vqdmladhxq_m: signed-only three-vector form, same dispatch
   shape as vqdmladhq_m.  */
#define __arm_vqdmladhxq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmladhxq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmladhxq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmladhxq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic vqdmlsdhq_m: signed-only three-vector form, same dispatch
   shape as vqdmladhq_m.  */
#define __arm_vqdmlsdhq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmlsdhq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmlsdhq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmlsdhq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic vqdmlsdhxq_m: signed-only three-vector form, same dispatch
   shape as vqdmladhq_m.  */
#define __arm_vqdmlsdhxq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmlsdhxq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmlsdhxq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmlsdhxq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic vqabsq_m: two-vector predicated form, signed-only (s8/s16/s32);
   p2 is the predicate.  */
#define __arm_vqabsq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqabsq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqabsq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqabsq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Polymorphic vmvnq_m: vector/vector rows cover all six integer element
   types; __ARM_mve_type_int_n rows select the immediate (_n) variants for
   16/32-bit elements, using __ARM_mve_coerce1 (rather than __ARM_mve_coerce)
   on the scalar operand.  p2 is the predicate.  */
#define __arm_vmvnq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmvnq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmvnq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmvnq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmvnq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmvnq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmvnq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmvnq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce1(__p1, int) , p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmvnq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce1(__p1, int) , p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmvnq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce1(__p1, int) , p2), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmvnq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce1(__p1, int) , p2));})
/* Polymorphic vorrq_m_n: dispatches on p0 only (16/32-bit elements, signed
   and unsigned); p1 and p2 pass through unchanged.  */
#define __arm_vorrq_m_n(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vorrq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vorrq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vorrq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vorrq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2));})
/* Polymorphic vqshrunbq (unpredicated): signed-to-unsigned narrowing —
   unsigned narrow p0, signed wide p1 (s16/s32 only); p2 passes through.  */
#define __arm_vqshrunbq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int16x8_t]: __arm_vqshrunbq_n_s16 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int32x4_t]: __arm_vqshrunbq_n_s32 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Polymorphic vqshluq_m: unsigned result (p0) from a signed source (p1) of
   the same element width; p2/p3 pass through unchanged.  */
#define __arm_vqshluq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqshluq_m_n_s8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqshluq_m_n_s16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqshluq_m_n_s32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3));})
/* Polymorphic vshlq_m: three-operand predicated shift.  Note the unsigned
   rows take a SIGNED shift-count vector (p2 coerced to intNxM_t) while p0/p1
   stay unsigned — this asymmetry is deliberate, matching the type-specific
   intrinsic signatures.  */
#define __arm_vshlq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vshlq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vshlq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vshlq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vshlq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vshlq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vshlq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic vshlq_m_n: two-vector dispatch across all six integer element
   types; p2 (immediate) and p3 (predicate) pass through unchanged.  */
#define __arm_vshlq_m_n(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vshlq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vshlq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vshlq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vshlq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vshlq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vshlq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic vshlq_m_r: dispatches on p0 only, across all six integer
   element types; p1 and p2 pass through unchanged.  */
#define __arm_vshlq_m_r(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
  int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlq_m_r_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlq_m_r_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_int32x4_t]: __arm_vshlq_m_r_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2), \
  int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlq_m_r_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1, p2), \
  int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlq_m_r_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2), \
  int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshlq_m_r_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2));})
/* Polymorphic vsriq_m: two-vector dispatch across all six integer element
   types, forwarding to the _n variants; p2 and p3 pass through unchanged.  */
#define __arm_vsriq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vsriq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vsriq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vsriq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2, p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vsriq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2, p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vsriq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2, p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vsriq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2, p3));})
/* Polymorphic dispatch for the predicated vhaddq intrinsic.  _Generic keys
   on the MVE typeids of the three data arguments (via a pointer-to-array
   type built from __ARM_mve_typeid) and selects the type-suffixed
   implementation; p3 (the predicate) is passed through unchanged.  Scalar
   second operands match __ARM_mve_type_int_n and are coerced from the raw
   argument p2, not the local copy __p2 — presumably so a constant argument
   stays a constant expression (do not "fix" to __p2).  */
#define __arm_vhaddq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vhaddq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vhaddq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vhaddq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vhaddq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vhaddq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vhaddq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhaddq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhaddq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhaddq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vhaddq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vhaddq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vhaddq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic dispatch for the predicated vhcaddq_rot270 intrinsic.
   Signed vector operands only (s8/s16/s32); all three data arguments must
   share one element type, and p3 is the pass-through predicate.  */
#define __arm_vhcaddq_rot270_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhcaddq_rot270_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhcaddq_rot270_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhcaddq_rot270_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic dispatch for the predicated vhcaddq_rot90 intrinsic.
   Mirrors __arm_vhcaddq_rot270_m: signed vector operands only, predicate
   p3 passed through unchanged.  */
#define __arm_vhcaddq_rot90_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhcaddq_rot90_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhcaddq_rot90_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhcaddq_rot90_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic dispatch for the predicated vhsubq intrinsic.  Same shape as
   __arm_vhaddq_m but with the vector-vector cases listed before the scalar
   (_n) cases; _Generic association order is irrelevant to selection.
   Scalar cases coerce the raw p2 (not __p2) — keep as-is.  */
#define __arm_vhsubq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vhsubq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vhsubq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vhsubq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vhsubq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vhsubq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vhsubq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vhsubq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vhsubq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vhsubq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vhsubq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vhsubq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vhsubq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3));})
/* Polymorphic dispatch for the predicated vmaxq intrinsic.  Vector-vector
   operands only (no scalar _n form here); all six integer element types
   are covered and p3 is the pass-through predicate.  */
#define __arm_vmaxq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmaxq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmaxq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmaxq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmaxq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmaxq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmaxq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic dispatch for the predicated vminq intrinsic.  Structure is
   identical to __arm_vmaxq_m: vector-vector operands for all six integer
   element types, predicate p3 passed through unchanged.  */
#define __arm_vminq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vminq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vminq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vminq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vminq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vminq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vminq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic dispatch for the predicated vmlaq intrinsic.  Only scalar
   (_n) variants exist: the third argument must match __ARM_mve_type_int_n
   and is coerced from the raw p2 (not the copy __p2) — keep as-is so a
   constant argument presumably stays a constant expression.  */
#define __arm_vmlaq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vmlaq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmlaq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmlaq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vmlaq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmlaq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmlaq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3));})
/* Polymorphic dispatch for the predicated vmlasq intrinsic.  Same shape as
   __arm_vmlaq_m: scalar (_n) variants only, third argument coerced from
   the raw p2 via __ARM_mve_coerce3.  */
#define __arm_vmlasq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vmlasq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vmlasq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vmlasq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vmlasq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vmlasq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vmlasq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3));})
/* Polymorphic dispatch for the predicated vmulhq intrinsic.  Vector-vector
   operands only, six integer element types, predicate p3 passed through
   unchanged.  */
#define __arm_vmulhq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulhq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulhq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulhq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulhq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulhq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulhq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic dispatch for the predicated vmullbq_int intrinsic.  Widening
   form: the first (inactive-value) argument p0 has double-width elements
   relative to the two multiplicands (e.g. int16x8_t result for int8x16_t
   inputs), so the dispatch keys on mixed typeids.  */
#define __arm_vmullbq_int_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmullbq_int_m_s8 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmullbq_int_m_s16 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int64x2_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmullbq_int_m_s32 (__ARM_mve_coerce(__p0, int64x2_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmullbq_int_m_u8 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmullbq_int_m_u16 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint64x2_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmullbq_int_m_u32 (__ARM_mve_coerce(__p0, uint64x2_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic dispatch for the predicated vmulltq_int intrinsic.  Mirrors
   __arm_vmullbq_int_m: widening form where p0 carries double-width
   elements relative to the multiplicands p1/p2.  */
#define __arm_vmulltq_int_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulltq_int_m_s8 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulltq_int_m_s16 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int64x2_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulltq_int_m_s32 (__ARM_mve_coerce(__p0, int64x2_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulltq_int_m_u8 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulltq_int_m_u16 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint64x2_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulltq_int_m_u32 (__ARM_mve_coerce(__p0, uint64x2_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic dispatch for the predicated vmulltq_poly intrinsic.  Only
   the p8 and p16 polynomial variants exist; operands are carried in
   unsigned vector types with a double-width p0, as in the _int widening
   macros above.  */
#define __arm_vmulltq_poly_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulltq_poly_m_p8 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulltq_poly_m_p16 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3));})
/* Polymorphic dispatch for the predicated vqaddq intrinsic.  Scalar (_n)
   cases are listed first, then the vector-vector cases; _Generic order
   does not affect selection.  Scalar cases coerce the raw p2 via
   __ARM_mve_coerce3 — keep as-is.  */
#define __arm_vqaddq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqaddq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqaddq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqaddq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vqaddq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vqaddq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vqaddq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqaddq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqaddq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqaddq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vqaddq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
  int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vqaddq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
  int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vqaddq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Polymorphic dispatch for the predicated vqdmlahq intrinsic.  Signed
   scalar (_n) variants only; the scalar is coerced from the raw p2 via
   __ARM_mve_coerce3.  */
#define __arm_vqdmlahq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqdmlahq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmlahq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmlahq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3));})
/* Polymorphic dispatch for the predicated vqdmulhq intrinsic.  Signed
   only; both scalar (_n, via raw-p2 coercion) and vector-vector variants
   are provided.  */
#define __arm_vqdmulhq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int_n]: __arm_vqdmulhq_m_n_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmulhq_m_n_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmulhq_m_n_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqdmulhq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmulhq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmulhq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic dispatch for the predicated vqdmullbq intrinsic.  Signed
   widening form: p0 has double-width elements relative to p1.  Vector
   cases first, then scalar (_n) cases coercing the raw p2.  */
#define __arm_vqdmullbq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmullbq_m_s16 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int64x2_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmullbq_m_s32 (__ARM_mve_coerce(__p0, int64x2_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmullbq_m_n_s16 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int64x2_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmullbq_m_n_s32 (__ARM_mve_coerce(__p0, int64x2_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3));})
/* Polymorphic dispatch for the predicated vqdmulltq intrinsic.  Mirrors
   __arm_vqdmullbq_m (signed widening, _n and vector variants) but lists
   the scalar cases first — order is irrelevant to _Generic selection.  */
#define __arm_vqdmulltq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int_n]: __arm_vqdmulltq_m_n_s16 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int64x2_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int_n]: __arm_vqdmulltq_m_n_s32 (__ARM_mve_coerce(__p0, int64x2_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce3(p2, int), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqdmulltq_m_s16 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int64x2_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqdmulltq_m_s32 (__ARM_mve_coerce(__p0, int64x2_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic dispatch for the predicated vqrdmladhq intrinsic.  Signed
   vector-vector variants only (s8/s16/s32), predicate p3 passed through
   unchanged.  */
#define __arm_vqrdmladhq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmladhq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmladhq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmladhq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic dispatch for the predicated vqrdmladhxq intrinsic.  Same
   shape as __arm_vqrdmladhq_m: signed vector-vector variants only.  */
#define __arm_vqrdmladhxq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vqrdmladhxq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vqrdmladhxq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vqrdmladhxq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic dispatch for the predicated vmlsdavaxq intrinsic.  Only the
   two vector operands p1/p2 participate in dispatch; the scalar
   accumulator p0 and predicate p3 are forwarded unmodified (p0 is not
   copied into a local, unlike the vector operands).  */
#define __arm_vmlsdavaxq_p(p0,p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmlsdavaxq_p_s8 (p0, __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsdavaxq_p_s16 (p0, __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsdavaxq_p_s32 (p0, __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Polymorphic dispatch for the (unpredicated) vmlsdavaq intrinsic.
   Dispatch keys on the two vector operands; the scalar accumulator p0 is
   forwarded unmodified.  Signed variants only.  */
#define __arm_vmlsdavaq(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmlsdavaq_s8(p0, __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsdavaq_s16(p0, __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsdavaq_s32(p0, __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* Polymorphic dispatch for the (unpredicated) vmlsdavaxq intrinsic.
   Dispatch keys on the two vector operands p1/p2; the scalar accumulator
   p0 is forwarded unmodified.  Signed variants only.  The local copies
   are declared p1 first, matching every sibling macro in this file
   (e.g. __arm_vmlsdavaq), so the operands are evaluated left-to-right;
   the original declared p2 before p1, which reversed side-effect order
   for argument expressions relative to its siblings.  */
#define __arm_vmlsdavaxq(p0,p1,p2) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmlsdavaxq_s8(p0, __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsdavaxq_s16(p0, __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsdavaxq_s32(p0, __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* Polymorphic dispatch for the predicated vmlsdavq intrinsic.  Two-operand
   dispatch on p0/p1; the predicate p2 is passed through unchanged.  Signed
   variants only.  */
#define __arm_vmlsdavq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmlsdavq_p_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsdavq_p_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsdavq_p_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Polymorphic dispatch for the predicated vmlsdavxq intrinsic.  Same shape
   as __arm_vmlsdavq_p: two-operand dispatch, predicate p2 passed through,
   signed variants only.  */
#define __arm_vmlsdavxq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
  __typeof(p1) __p1 = (p1); \
  _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmlsdavxq_p_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsdavxq_p_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsdavxq_p_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Polymorphic dispatch for the predicated vmlsdavaq intrinsic.  Dispatch
   keys on the two vector operands p1/p2; the scalar accumulator p0 and
   predicate p3 are forwarded unmodified.  Signed variants only.  */
#define __arm_vmlsdavaq_p(p0,p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
  __typeof(p2) __p2 = (p2); \
  _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
  int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmlsdavaq_p_s8(p0, __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
  int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsdavaq_p_s16(p0, __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
  int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsdavaq_p_s32(p0, __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Predicated accumulating vmladavaxq: three-way dispatch on the scalar
   accumulator (int_n) and the two vector argument types.  */
41268 #define __arm_vmladavaxq_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
41269 __typeof(p1) __p1 = (p1); \
41270 __typeof(p2) __p2 = (p2); \
41271 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41272 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmladavaxq_p_s8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
41273 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmladavaxq_p_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
41274 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmladavaxq_p_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3));})
/* Merging polynomial multiply (bottom): dispatch on the widened inactive
   vector and the two narrow operand vectors (p8 -> u16, p16 -> u32).  */
41276 #define __arm_vmullbq_poly_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
41277 __typeof(p1) __p1 = (p1); \
41278 __typeof(p2) __p2 = (p2); \
41279 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41280 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmullbq_poly_m_p8 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
41281 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmullbq_poly_m_p16 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3));})
/* Byte gather load: dispatch on the base-pointer type (signed/unsigned
   int8_t *) and the offset-vector width.  Note p0 is used directly (not
   copied) so __ARM_mve_typeid/__ARM_mve_coerce1 can see the pointer type.  */
41283 #define __arm_vldrbq_gather_offset(p0,p1) ({ __typeof(p1) __p1 = (p1); \
41284 _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
41285 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint8x16_t]: __arm_vldrbq_gather_offset_s8(__ARM_mve_coerce1(p0, int8_t *), __ARM_mve_coerce(__p1, uint8x16_t)), \
41286 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrbq_gather_offset_s16(__ARM_mve_coerce1(p0, int8_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
41287 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrbq_gather_offset_s32(__ARM_mve_coerce1(p0, int8_t *), __ARM_mve_coerce(__p1, uint32x4_t)), \
41288 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16_t]: __arm_vldrbq_gather_offset_u8(__ARM_mve_coerce1(p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16_t)), \
41289 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vldrbq_gather_offset_u16(__ARM_mve_coerce1(p0, uint8_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
41290 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vldrbq_gather_offset_u32(__ARM_mve_coerce1(p0, uint8_t *), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Merging vidupq: dispatch on the inactive vector width and on whether the
   start argument is a scalar (int_n -> _n_ form) or a uint32_t pointer
   (-> writeback _wb_ form).  */
41292 #define __arm_vidupq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
41293 __typeof(p1) __p1 = (p1); \
41294 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41295 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vidupq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), (uint32_t) __p1, p2, p3), \
41296 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vidupq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), (uint32_t) __p1, p2, p3), \
41297 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vidupq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), (uint32_t) __p1, p2, p3), \
41298 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint32_t_ptr]: __arm_vidupq_m_wb_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint32_t *), p2, p3), \
41299 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32_t_ptr]: __arm_vidupq_m_wb_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32_t *), p2, p3), \
41300 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32_t_ptr]: __arm_vidupq_m_wb_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32_t *), p2, p3));})
/* Merging vddupq: same dispatch scheme as __arm_vidupq_m (scalar start ->
   _n_ form, uint32_t pointer start -> writeback _wb_ form).  */
41302 #define __arm_vddupq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
41303 __typeof(p1) __p1 = (p1); \
41304 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41305 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vddupq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), (uint32_t) __p1, p2, p3), \
41306 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vddupq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), (uint32_t) __p1, p2, p3), \
41307 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vddupq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), (uint32_t) __p1, p2, p3), \
41308 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint32_t_ptr]: __arm_vddupq_m_wb_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint32_t *), p2, p3), \
41309 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32_t_ptr]: __arm_vddupq_m_wb_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32_t *), p2, p3), \
41310 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32_t_ptr]: __arm_vddupq_m_wb_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32_t *), p2, p3));})
/* vidupq_u16: scalar start -> _n_ form; uint32_t pointer -> writeback.  */
41312 #define __arm_vidupq_u16(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41313 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41314 int (*)[__ARM_mve_type_int_n]: __arm_vidupq_n_u16 ((uint32_t) __p0, p1), \
41315 int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vidupq_wb_u16 (__ARM_mve_coerce(__p0, uint32_t *), p1));})
/* vidupq_u32: scalar start -> _n_ form; uint32_t pointer -> writeback.  */
41317 #define __arm_vidupq_u32(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41318 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41319 int (*)[__ARM_mve_type_int_n]: __arm_vidupq_n_u32 ((uint32_t) __p0, p1), \
41320 int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vidupq_wb_u32 (__ARM_mve_coerce(__p0, uint32_t *), p1));})
/* vidupq_u8: scalar start -> _n_ form; uint32_t pointer -> writeback.  */
41322 #define __arm_vidupq_u8(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41323 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41324 int (*)[__ARM_mve_type_int_n]: __arm_vidupq_n_u8 ((uint32_t) __p0, p1), \
41325 int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vidupq_wb_u8 (__ARM_mve_coerce(__p0, uint32_t *), p1));})
/* vddupq_u16: scalar start -> _n_ form; uint32_t pointer -> writeback.  */
41327 #define __arm_vddupq_u16(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41328 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41329 int (*)[__ARM_mve_type_int_n]: __arm_vddupq_n_u16 ((uint32_t) __p0, p1), \
41330 int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vddupq_wb_u16 (__ARM_mve_coerce(__p0, uint32_t *), p1));})
/* vddupq_u32: scalar start -> _n_ form; uint32_t pointer -> writeback.  */
41332 #define __arm_vddupq_u32(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41333 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41334 int (*)[__ARM_mve_type_int_n]: __arm_vddupq_n_u32 ((uint32_t) __p0, p1), \
41335 int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vddupq_wb_u32 (__ARM_mve_coerce(__p0, uint32_t *), p1));})
/* vddupq_u8: scalar start -> _n_ form; uint32_t pointer -> writeback.  */
41337 #define __arm_vddupq_u8(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41338 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41339 int (*)[__ARM_mve_type_int_n]: __arm_vddupq_n_u8 ((uint32_t) __p0, p1), \
41340 int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vddupq_wb_u8 (__ARM_mve_coerce(__p0, uint32_t *), p1));})
/* Merging viwdupq: dispatch on inactive vector width and on whether the
   start argument is a scalar (int_n, via __ARM_mve_coerce3 on the uncopied
   p1) or a uint32_t pointer (writeback _wb_ form).  */
41342 #define __arm_viwdupq_m(p0,p1,p2,p3,p4) ({ __typeof(p0) __p0 = (p0); \
41343 __typeof(p1) __p1 = (p1); \
41344 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41345 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_viwdupq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int), p2, p3, p4), \
41346 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_viwdupq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int), p2, p3, p4), \
41347 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_viwdupq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int), p2, p3, p4), \
41348 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint32_t_ptr]: __arm_viwdupq_m_wb_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint32_t *), p2, p3, p4), \
41349 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32_t_ptr]: __arm_viwdupq_m_wb_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32_t *), p2, p3, p4), \
41350 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32_t_ptr]: __arm_viwdupq_m_wb_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32_t *), p2, p3, p4));})
/* viwdupq_u16: scalar start -> _n_ form; uint32_t pointer -> writeback.
   The wrap/imm argument p2 is forwarded as-is, matching the u8/u32 variants
   and the vdwdupq family; the previous no-op "(const int)" casts on p2 were
   inconsistent with those siblings and have been dropped.  */
41352 #define __arm_viwdupq_u16(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41353 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41354 int (*)[__ARM_mve_type_int_n]: __arm_viwdupq_n_u16 (__ARM_mve_coerce3(p0, int), p1, p2), \
41355 int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_viwdupq_wb_u16 (__ARM_mve_coerce(__p0, uint32_t *), p1, p2));})
/* viwdupq_u32: scalar start -> _n_ form; uint32_t pointer -> writeback.  */
41357 #define __arm_viwdupq_u32(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41358 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41359 int (*)[__ARM_mve_type_int_n]: __arm_viwdupq_n_u32 (__ARM_mve_coerce3(p0, int), p1, p2), \
41360 int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_viwdupq_wb_u32 (__ARM_mve_coerce(__p0, uint32_t *), p1, p2));})
/* viwdupq_u8: scalar start -> _n_ form; uint32_t pointer -> writeback.  */
41362 #define __arm_viwdupq_u8(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41363 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41364 int (*)[__ARM_mve_type_int_n]: __arm_viwdupq_n_u8 (__ARM_mve_coerce3(p0, int), p1, p2), \
41365 int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_viwdupq_wb_u8 (__ARM_mve_coerce(__p0, uint32_t *), p1, p2));})
/* Merging vdwdupq: same dispatch scheme as __arm_viwdupq_m (scalar start ->
   _n_ form, uint32_t pointer start -> writeback _wb_ form).  */
41367 #define __arm_vdwdupq_m(p0,p1,p2,p3,p4) ({ __typeof(p0) __p0 = (p0); \
41368 __typeof(p1) __p1 = (p1); \
41369 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41370 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vdwdupq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int), p2, p3, p4), \
41371 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vdwdupq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int), p2, p3, p4), \
41372 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vdwdupq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int), p2, p3, p4), \
41373 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint32_t_ptr]: __arm_vdwdupq_m_wb_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint32_t *), p2, p3, p4), \
41374 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint32_t_ptr]: __arm_vdwdupq_m_wb_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint32_t *), p2, p3, p4), \
41375 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32_t_ptr]: __arm_vdwdupq_m_wb_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32_t *), p2, p3, p4));})
/* vdwdupq_u16: scalar start -> _n_ form; uint32_t pointer -> writeback.  */
41377 #define __arm_vdwdupq_u16(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41378 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41379 int (*)[__ARM_mve_type_int_n]: __arm_vdwdupq_n_u16 (__ARM_mve_coerce3(p0, int), p1, p2), \
41380 int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vdwdupq_wb_u16 (__ARM_mve_coerce(__p0, uint32_t *), p1, p2));})
/* vdwdupq_u32: scalar start -> _n_ form; uint32_t pointer -> writeback.  */
41382 #define __arm_vdwdupq_u32(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41383 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41384 int (*)[__ARM_mve_type_int_n]: __arm_vdwdupq_n_u32 (__ARM_mve_coerce3(p0, int), p1, p2), \
41385 int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vdwdupq_wb_u32 (__ARM_mve_coerce(__p0, uint32_t *), p1, p2));})
/* vdwdupq_u8: scalar start -> _n_ form; uint32_t pointer -> writeback.  */
41387 #define __arm_vdwdupq_u8(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41388 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41389 int (*)[__ARM_mve_type_int_n]: __arm_vdwdupq_n_u8 (__ARM_mve_coerce3(p0, int), p1, p2), \
41390 int (*)[__ARM_mve_type_uint32_t_ptr]: __arm_vdwdupq_wb_u8 (__ARM_mve_coerce(__p0, uint32_t *), p1, p2));})
/* Predicated vshlcq: single-key dispatch on the vector argument's signedness
   and element width; remaining arguments are forwarded unchanged.  */
41392 #define __arm_vshlcq_m(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
41393 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41394 int (*)[__ARM_mve_type_int8x16_t]: __arm_vshlcq_m_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1, p2, p3), \
41395 int (*)[__ARM_mve_type_int16x8_t]: __arm_vshlcq_m_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1, p2, p3), \
41396 int (*)[__ARM_mve_type_int32x4_t]: __arm_vshlcq_m_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1, p2, p3), \
41397 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vshlcq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1, p2, p3), \
41398 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vshlcq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1, p2, p3), \
41399 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vshlcq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1, p2, p3));})
/* vabavq: p0 is the scalar accumulator (passed through as __p0); dispatch
   is on the two vector argument types across all six signed/unsigned,
   8/16/32-bit combinations.  */
41401 #define __arm_vabavq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41402 __typeof(p1) __p1 = (p1); \
41403 __typeof(p2) __p2 = (p2); \
41404 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41405 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vabavq_s8 (__p0, __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
41406 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vabavq_s16 (__p0, __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
41407 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vabavq_s32 (__p0, __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
41408 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vabavq_u8 (__p0, __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t)), \
41409 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vabavq_u16 (__p0, __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t)), \
41410 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vabavq_u32 (__p0, __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)));})
/* Predicated vabavq: same dispatch as __arm_vabavq with the predicate p3
   appended to each call.  */
41412 #define __arm_vabavq_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
41413 __typeof(p1) __p1 = (p1); \
41414 __typeof(p2) __p2 = (p2); \
41415 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41416 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vabavq_p_s8(__p0, __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
41417 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vabavq_p_s16(__p0, __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
41418 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vabavq_p_s32(__p0, __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
41419 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vabavq_p_u8(__p0, __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
41420 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vabavq_p_u16(__p0, __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
41421 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vabavq_p_u32(__p0, __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* vaddlvaq: dispatch on the scalar accumulator (int_n) and the 32-bit
   vector's signedness.  */
41423 #define __arm_vaddlvaq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41424 __typeof(p1) __p1 = (p1); \
41425 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41426 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vaddlvaq_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t)), \
41427 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t]: __arm_vaddlvaq_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Predicated vaddlvaq: same dispatch as __arm_vaddlvaq with predicate p2.  */
41429 #define __arm_vaddlvaq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41430 __typeof(p1) __p1 = (p1); \
41431 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41432 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vaddlvaq_p_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), p2), \
41433 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t]: __arm_vaddlvaq_p_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* vaddlvq: dispatch on the 32-bit vector's signedness.  */
41435 #define __arm_vaddlvq(p0) ({ __typeof(p0) __p0 = (p0); \
41436 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41437 int (*)[__ARM_mve_type_int32x4_t]: __arm_vaddlvq_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
41438 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vaddlvq_u32 (__ARM_mve_coerce(__p0, uint32x4_t)));})
/* Predicated vaddlvq: dispatch on the 32-bit vector's signedness.  */
41440 #define __arm_vaddlvq_p(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41441 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41442 int (*)[__ARM_mve_type_int32x4_t]: __arm_vaddlvq_p_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
41443 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vaddlvq_p_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* vaddvaq: dispatch on the scalar accumulator (int_n) and the vector's
   signedness and element width.  */
41445 #define __arm_vaddvaq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41446 __typeof(p1) __p1 = (p1); \
41447 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41448 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t]: __arm_vaddvaq_s8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int8x16_t)), \
41449 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t]: __arm_vaddvaq_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t)), \
41450 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vaddvaq_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t)), \
41451 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint8x16_t]: __arm_vaddvaq_u8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint8x16_t)), \
41452 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t]: __arm_vaddvaq_u16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint16x8_t)), \
41453 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t]: __arm_vaddvaq_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* Predicated vaddvaq: same dispatch as __arm_vaddvaq with predicate p2.  */
41455 #define __arm_vaddvaq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41456 __typeof(p1) __p1 = (p1); \
41457 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41458 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t]: __arm_vaddvaq_p_s8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int8x16_t), p2), \
41459 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t]: __arm_vaddvaq_p_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t), p2), \
41460 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vaddvaq_p_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), p2), \
41461 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint8x16_t]: __arm_vaddvaq_p_u8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
41462 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t]: __arm_vaddvaq_p_u16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
41463 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t]: __arm_vaddvaq_p_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* vaddvq: single-key dispatch on the vector's signedness and width.  */
41465 #define __arm_vaddvq(p0) ({ __typeof(p0) __p0 = (p0); \
41466 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41467 int (*)[__ARM_mve_type_int8x16_t]: __arm_vaddvq_s8 (__ARM_mve_coerce(__p0, int8x16_t)), \
41468 int (*)[__ARM_mve_type_int16x8_t]: __arm_vaddvq_s16 (__ARM_mve_coerce(__p0, int16x8_t)), \
41469 int (*)[__ARM_mve_type_int32x4_t]: __arm_vaddvq_s32 (__ARM_mve_coerce(__p0, int32x4_t)), \
41470 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vaddvq_u8 (__ARM_mve_coerce(__p0, uint8x16_t)), \
41471 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vaddvq_u16 (__ARM_mve_coerce(__p0, uint16x8_t)), \
41472 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vaddvq_u32 (__ARM_mve_coerce(__p0, uint32x4_t)));})
/* Predicated vaddvq: single-key dispatch, predicate p1 forwarded.  */
41474 #define __arm_vaddvq_p(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41475 _Generic( (int (*)[__ARM_mve_typeid(__p0)])0, \
41476 int (*)[__ARM_mve_type_int8x16_t]: __arm_vaddvq_p_s8 (__ARM_mve_coerce(__p0, int8x16_t), p1), \
41477 int (*)[__ARM_mve_type_int16x8_t]: __arm_vaddvq_p_s16 (__ARM_mve_coerce(__p0, int16x8_t), p1), \
41478 int (*)[__ARM_mve_type_int32x4_t]: __arm_vaddvq_p_s32 (__ARM_mve_coerce(__p0, int32x4_t), p1), \
41479 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vaddvq_p_u8 (__ARM_mve_coerce(__p0, uint8x16_t), p1), \
41480 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vaddvq_p_u16 (__ARM_mve_coerce(__p0, uint16x8_t), p1), \
41481 int (*)[__ARM_mve_type_uint32x4_t]: __arm_vaddvq_p_u32 (__ARM_mve_coerce(__p0, uint32x4_t), p1));})
/* vcmpcsq: dispatch on the unsigned vector width, with vector-vector and
   vector-scalar (int_n -> _n_ form) second-operand variants.  */
41483 #define __arm_vcmpcsq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41484 __typeof(p1) __p1 = (p1); \
41485 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41486 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmpcsq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
41487 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmpcsq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
41488 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmpcsq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
41489 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmpcsq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
41490 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmpcsq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
41491 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmpcsq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)));})
/* Predicated vcmpcsq: same vector/scalar dispatch as __arm_vcmpcsq with
   predicate p2.  */
41493 #define __arm_vcmpcsq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41494 __typeof(p1) __p1 = (p1); \
41495 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41496 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmpcsq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
41497 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmpcsq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
41498 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmpcsq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2), \
41499 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmpcsq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int), p2), \
41500 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmpcsq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int), p2), \
41501 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmpcsq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int), p2));})
/* vcmphiq: dispatch on the unsigned vector width, with vector-vector and
   vector-scalar (int_n -> _n_ form) second-operand variants.  */
41503 #define __arm_vcmphiq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41504 __typeof(p1) __p1 = (p1); \
41505 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41506 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmphiq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
41507 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmphiq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
41508 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmphiq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)), \
41509 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmphiq_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int)), \
41510 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmphiq_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int)), \
41511 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmphiq_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int)));})
/* Predicated vcmphiq: same vector/scalar dispatch as __arm_vcmphiq with
   predicate p2 (scalar _n_ cases listed first here).  */
41513 #define __arm_vcmphiq_m(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41514 __typeof(p1) __p1 = (p1); \
41515 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41516 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_int_n]: __arm_vcmphiq_m_n_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce3(p1, int), p2), \
41517 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_int_n]: __arm_vcmphiq_m_n_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce3(p1, int), p2), \
41518 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_int_n]: __arm_vcmphiq_m_n_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce3(p1, int), p2), \
41519 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vcmphiq_m_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
41520 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vcmphiq_m_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
41521 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vcmphiq_m_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* vmaxavq: p0 is the scalar operand (int_n key, passed as __p0); dispatch
   on the signed vector width.  */
41523 #define __arm_vmaxavq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41524 __typeof(p1) __p1 = (p1); \
41525 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41526 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t]: __arm_vmaxavq_s8 (__p0, __ARM_mve_coerce(__p1, int8x16_t)), \
41527 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t]: __arm_vmaxavq_s16 (__p0, __ARM_mve_coerce(__p1, int16x8_t)), \
41528 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vmaxavq_s32 (__p0, __ARM_mve_coerce(__p1, int32x4_t)));})
/* Predicated vmaxavq: same dispatch as __arm_vmaxavq with predicate p2.  */
41530 #define __arm_vmaxavq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41531 __typeof(p1) __p1 = (p1); \
41532 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41533 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t]: __arm_vmaxavq_p_s8 (__p0, __ARM_mve_coerce(__p1, int8x16_t), p2), \
41534 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t]: __arm_vmaxavq_p_s16 (__p0, __ARM_mve_coerce(__p1, int16x8_t), p2), \
41535 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vmaxavq_p_s32 (__p0, __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* vmaxvq: scalar operand plus dispatch on the vector's signedness and
   element width (all six variants).  */
41537 #define __arm_vmaxvq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41538 __typeof(p1) __p1 = (p1); \
41539 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41540 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t]: __arm_vmaxvq_s8 (__p0, __ARM_mve_coerce(__p1, int8x16_t)), \
41541 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t]: __arm_vmaxvq_s16 (__p0, __ARM_mve_coerce(__p1, int16x8_t)), \
41542 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vmaxvq_s32 (__p0, __ARM_mve_coerce(__p1, int32x4_t)), \
41543 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint8x16_t]: __arm_vmaxvq_u8 (__p0, __ARM_mve_coerce(__p1, uint8x16_t)), \
41544 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t]: __arm_vmaxvq_u16 (__p0, __ARM_mve_coerce(__p1, uint16x8_t)), \
41545 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t]: __arm_vmaxvq_u32 (__p0,__ARM_mve_coerce(__p1, uint32x4_t)));})
/* Predicated vmaxvq: same dispatch as __arm_vmaxvq with predicate p2.  */
41547 #define __arm_vmaxvq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41548 __typeof(p1) __p1 = (p1); \
41549 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41550 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t]: __arm_vmaxvq_p_s8 (__p0, __ARM_mve_coerce(__p1, int8x16_t), p2), \
41551 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t]: __arm_vmaxvq_p_s16 (__p0, __ARM_mve_coerce(__p1, int16x8_t), p2), \
41552 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vmaxvq_p_s32 (__p0, __ARM_mve_coerce(__p1, int32x4_t), p2), \
41553 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint8x16_t]: __arm_vmaxvq_p_u8 (__p0, __ARM_mve_coerce(__p1, uint8x16_t), p2), \
41554 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t]: __arm_vmaxvq_p_u16 (__p0, __ARM_mve_coerce(__p1, uint16x8_t), p2), \
41555 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t]: __arm_vmaxvq_p_u32 (__p0, __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* vminavq: scalar operand plus dispatch on the signed vector width.  */
41557 #define __arm_vminavq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41558 __typeof(p1) __p1 = (p1); \
41559 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41560 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t]: __arm_vminavq_s8 (__p0, __ARM_mve_coerce(__p1, int8x16_t)), \
41561 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t]: __arm_vminavq_s16 (__p0, __ARM_mve_coerce(__p1, int16x8_t)), \
41562 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vminavq_s32 (__p0, __ARM_mve_coerce(__p1, int32x4_t)));})
/* Predicated vminavq: same dispatch as __arm_vminavq with predicate p2.  */
41564 #define __arm_vminavq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41565 __typeof(p1) __p1 = (p1); \
41566 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41567 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t]: __arm_vminavq_p_s8 (__p0, __ARM_mve_coerce(__p1, int8x16_t), p2), \
41568 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t]: __arm_vminavq_p_s16 (__p0, __ARM_mve_coerce(__p1, int16x8_t), p2), \
41569 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vminavq_p_s32 (__p0, __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* Zeroing (_x) vmaxq: dispatch on the two vector operand types across all
   six signed/unsigned, 8/16/32-bit combinations; predicate p3 forwarded.  */
41571 #define __arm_vmaxq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
41572 __typeof(p2) __p2 = (p2); \
41573 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41574 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmaxq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
41575 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmaxq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
41576 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmaxq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
41577 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmaxq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
41578 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmaxq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
41579 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmaxq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Zeroing (_x) vminq: same dispatch scheme as __arm_vmaxq_x.  */
41581 #define __arm_vminq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
41582 __typeof(p2) __p2 = (p2); \
41583 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41584 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vminq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
41585 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vminq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
41586 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vminq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
41587 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vminq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
41588 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vminq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
41589 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vminq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* vminvq: scalar operand plus dispatch on the vector's signedness and
   element width (all six variants).  */
41591 #define __arm_vminvq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41592 __typeof(p1) __p1 = (p1); \
41593 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41594 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t]: __arm_vminvq_s8 (__p0, __ARM_mve_coerce(__p1, int8x16_t)), \
41595 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t]: __arm_vminvq_s16 (__p0, __ARM_mve_coerce(__p1, int16x8_t)), \
41596 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vminvq_s32 (__p0, __ARM_mve_coerce(__p1, int32x4_t)), \
41597 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint8x16_t]: __arm_vminvq_u8 (__p0, __ARM_mve_coerce(__p1, uint8x16_t)), \
41598 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t]: __arm_vminvq_u16 (__p0, __ARM_mve_coerce(__p1, uint16x8_t)), \
41599 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t]: __arm_vminvq_u32 (__p0, __ARM_mve_coerce(__p1, uint32x4_t)));})
41601 #define __arm_vminvq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41602 __typeof(p1) __p1 = (p1); \
41603 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41604 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t]: __arm_vminvq_p_s8 (__p0, __ARM_mve_coerce(__p1, int8x16_t), p2), \
41605 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t]: __arm_vminvq_p_s16 (__p0, __ARM_mve_coerce(__p1, int16x8_t), p2), \
41606 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t]: __arm_vminvq_p_s32 (__p0, __ARM_mve_coerce(__p1, int32x4_t), p2), \
41607 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint8x16_t]: __arm_vminvq_p_u8 (__p0, __ARM_mve_coerce(__p1, uint8x16_t), p2), \
41608 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t]: __arm_vminvq_p_u16 (__p0, __ARM_mve_coerce(__p1, uint16x8_t), p2), \
41609 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t]: __arm_vminvq_p_u32 (__p0, __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* vmladavaq: multiply-accumulate across vector with scalar addend p0.
   Note the call sites pass the raw p0 through __ARM_mve_coerce3 (the local
   __p0 is declared only so __ARM_mve_typeid can classify it as int_n).  */
41611 #define __arm_vmladavaq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41612 __typeof(p1) __p1 = (p1); \
41613 __typeof(p2) __p2 = (p2); \
41614 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41615 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmladavaq_s8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
41616 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmladavaq_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
41617 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmladavaq_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
41618 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmladavaq_u8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t)), \
41619 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmladavaq_u16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t)), \
41620 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmladavaq_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)));})
/* vmladavaq_p: predicated variant; p3 is the predicate.  */
41622 #define __arm_vmladavaq_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
41623 __typeof(p1) __p1 = (p1); \
41624 __typeof(p2) __p2 = (p2); \
41625 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41626 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmladavaq_p_s8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
41627 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmladavaq_p_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
41628 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmladavaq_p_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
41629 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmladavaq_p_u8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
41630 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmladavaq_p_u16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
41631 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmladavaq_p_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* vmladavaxq: exchanging multiply-accumulate-across with scalar addend;
   same dispatch pattern as vmladavaq above.  */
41633 #define __arm_vmladavaxq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41634 __typeof(p1) __p1 = (p1); \
41635 __typeof(p2) __p2 = (p2); \
41636 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41637 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmladavaxq_s8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
41638 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmladavaxq_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
41639 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmladavaxq_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
41640 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmladavaxq_u8 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t)), \
41641 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmladavaxq_u16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t)), \
41642 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmladavaxq_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)));})
/* vmladavq: multiply-accumulate across vector, no scalar addend; both
   operands must share the same vector type.  */
41644 #define __arm_vmladavq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41645 __typeof(p1) __p1 = (p1); \
41646 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41647 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmladavq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
41648 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmladavq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
41649 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmladavq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
41650 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmladavq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
41651 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmladavq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
41652 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmladavq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vmladavq_p: predicated variant; p2 is the predicate.  */
41654 #define __arm_vmladavq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41655 __typeof(p1) __p1 = (p1); \
41656 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41657 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmladavq_p_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
41658 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmladavq_p_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
41659 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmladavq_p_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
41660 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmladavq_p_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
41661 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmladavq_p_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
41662 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmladavq_p_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* vmladavxq: exchanging multiply-accumulate across vector.  */
41664 #define __arm_vmladavxq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41665 __typeof(p1) __p1 = (p1); \
41666 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41667 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmladavxq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
41668 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmladavxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
41669 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmladavxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
41670 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmladavxq_u8 (__ARM_mve_coerce(__p0, uint8x16_t), __ARM_mve_coerce(__p1, uint8x16_t)), \
41671 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmladavxq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
41672 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmladavxq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vmladavxq_p: predicated variant; signed-only associations here, so
   unsigned operands are rejected at compile time via _Generic.  */
41674 #define __arm_vmladavxq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41675 __typeof(p1) __p1 = (p1); \
41676 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41677 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmladavxq_p_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t), p2), \
41678 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmladavxq_p_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
41679 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmladavxq_p_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* vmlaldavaq: long (widening) multiply-accumulate-across with scalar
   addend p0; 16/32-bit element types only.  */
41681 #define __arm_vmlaldavaq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41682 __typeof(p1) __p1 = (p1); \
41683 __typeof(p2) __p2 = (p2); \
41684 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41685 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlaldavaq_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
41686 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlaldavaq_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
41687 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmlaldavaq_u16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t)), \
41688 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmlaldavaq_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)));})
/* vmlaldavaxq: exchanging form of the above; signed-only.  */
41690 #define __arm_vmlaldavaxq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41691 __typeof(p1) __p1 = (p1); \
41692 __typeof(p2) __p2 = (p2); \
41693 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41694 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlaldavaxq_s16 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
41695 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlaldavaxq_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* vmlaldavq: long multiply-accumulate across vector; 16/32-bit types.  */
41697 #define __arm_vmlaldavq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41698 __typeof(p1) __p1 = (p1); \
41699 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41700 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlaldavq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
41701 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlaldavq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
41702 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmlaldavq_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t)), \
41703 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmlaldavq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vmlaldavq_p: predicated variant; p2 is the predicate.  */
41705 #define __arm_vmlaldavq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41706 __typeof(p1) __p1 = (p1); \
41707 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41708 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlaldavq_p_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
41709 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlaldavq_p_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
41710 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmlaldavq_p_u16 (__ARM_mve_coerce(__p0, uint16x8_t), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
41711 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmlaldavq_p_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* vmlaldavxq_p: predicated exchanging form; signed-only.  */
41713 #define __arm_vmlaldavxq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41714 __typeof(p1) __p1 = (p1); \
41715 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41716 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlaldavxq_p_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
41717 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlaldavxq_p_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* vmlsdavq: multiply-subtract across vector; signed-only.  */
41719 #define __arm_vmlsdavq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41720 __typeof(p1) __p1 = (p1); \
41721 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41722 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmlsdavq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
41723 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsdavq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
41724 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsdavq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vmlsdavxq: exchanging multiply-subtract across vector; signed-only.  */
41726 #define __arm_vmlsdavxq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41727 __typeof(p1) __p1 = (p1); \
41728 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41729 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmlsdavxq_s8 (__ARM_mve_coerce(__p0, int8x16_t), __ARM_mve_coerce(__p1, int8x16_t)), \
41730 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsdavxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
41731 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsdavxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vmlsldavaq: long multiply-subtract-across with accumulator p0.
   NOTE(review): unlike sibling accumulate macros (e.g. vmladavaq), the
   _Generic selector here keys only on p1/p2 and p0 is passed through
   uncoerced and unchecked — confirm against upstream whether this
   difference is intentional.  */
41733 #define __arm_vmlsldavaq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41734 __typeof(p1) __p1 = (p1); \
41735 __typeof(p2) __p2 = (p2); \
41736 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41737 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsldavaq_s16(__p0, __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
41738 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsldavaq_s32(__p0, __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* vmlsldavaxq: exchanging form; same p0 pass-through as vmlsldavaq.  */
41740 #define __arm_vmlsldavaxq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41741 __typeof(p1) __p1 = (p1); \
41742 __typeof(p2) __p2 = (p2); \
41743 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41744 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsldavaxq_s16(__p0, __ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
41745 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsldavaxq_s32(__p0, __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)));})
/* vmlsldavq: long multiply-subtract across vector; signed 16/32-bit.  */
41747 #define __arm_vmlsldavq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41748 __typeof(p1) __p1 = (p1); \
41749 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41750 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsldavq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
41751 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsldavq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vmlsldavq_p: predicated variant; p2 is the predicate.  */
41753 #define __arm_vmlsldavq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41754 __typeof(p1) __p1 = (p1); \
41755 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41756 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsldavq_p_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
41757 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsldavq_p_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* vmlsldavxq: exchanging form.  */
41759 #define __arm_vmlsldavxq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41760 __typeof(p1) __p1 = (p1); \
41761 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41762 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsldavxq_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t)), \
41763 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsldavxq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)));})
/* vmlsldavxq_p: predicated exchanging form.  */
41765 #define __arm_vmlsldavxq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41766 __typeof(p1) __p1 = (p1); \
41767 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41768 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmlsldavxq_p_s16 (__ARM_mve_coerce(__p0, int16x8_t), __ARM_mve_coerce(__p1, int16x8_t), p2), \
41769 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmlsldavxq_p_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2));})
/* vmovlbq_x: predicated widening move of bottom half; unary, so the
   _Generic selector keys on p1 alone; p2 is the predicate.  */
41771 #define __arm_vmovlbq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
41772 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
41773 int (*)[__ARM_mve_type_int8x16_t]: __arm_vmovlbq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
41774 int (*)[__ARM_mve_type_int16x8_t]: __arm_vmovlbq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
41775 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vmovlbq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2), \
41776 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vmovlbq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2));})
/* vmovltq_x: predicated widening move of top half; same shape as above.  */
41778 #define __arm_vmovltq_x(p1,p2) ({ __typeof(p1) __p1 = (p1); \
41779 _Generic( (int (*)[__ARM_mve_typeid(__p1)])0, \
41780 int (*)[__ARM_mve_type_int8x16_t]: __arm_vmovltq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), p2), \
41781 int (*)[__ARM_mve_type_int16x8_t]: __arm_vmovltq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), p2), \
41782 int (*)[__ARM_mve_type_uint8x16_t]: __arm_vmovltq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), p2), \
41783 int (*)[__ARM_mve_type_uint16x8_t]: __arm_vmovltq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), p2));})
/* vmulhq_x: predicated multiply-return-high-half; p3 is the predicate.  */
41785 #define __arm_vmulhq_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
41786 __typeof(p2) __p2 = (p2); \
41787 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41788 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulhq_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
41789 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulhq_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
41790 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulhq_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
41791 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulhq_x_u8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
41792 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulhq_x_u16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
41793 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulhq_x_u32 (__ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* vmullbq_int_x: predicated long multiply of bottom halves (integer).  */
41795 #define __arm_vmullbq_int_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
41796 __typeof(p2) __p2 = (p2); \
41797 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41798 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmullbq_int_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
41799 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmullbq_int_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
41800 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmullbq_int_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
41801 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmullbq_int_x_u8( __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
41802 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmullbq_int_x_u16( __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
41803 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmullbq_int_x_u32( __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* vmullbq_poly_x: predicated polynomial long multiply of bottom halves;
   poly types are carried in uint8x16_t/uint16x8_t containers here.  */
41805 #define __arm_vmullbq_poly_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
41806 __typeof(p2) __p2 = (p2); \
41807 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41808 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmullbq_poly_x_p8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
41809 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmullbq_poly_x_p16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3));})
/* vmulltq_int_x: predicated long multiply of top halves (integer).  */
41811 #define __arm_vmulltq_int_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
41812 __typeof(p2) __p2 = (p2); \
41813 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41814 int (*)[__ARM_mve_type_int8x16_t][__ARM_mve_type_int8x16_t]: __arm_vmulltq_int_x_s8 (__ARM_mve_coerce(__p1, int8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
41815 int (*)[__ARM_mve_type_int16x8_t][__ARM_mve_type_int16x8_t]: __arm_vmulltq_int_x_s16 (__ARM_mve_coerce(__p1, int16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
41816 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vmulltq_int_x_s32 (__ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
41817 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulltq_int_x_u8( __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
41818 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulltq_int_x_u16( __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
41819 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vmulltq_int_x_u32( __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* vmulltq_poly_x: predicated polynomial long multiply of top halves.  */
41821 #define __arm_vmulltq_poly_x(p1,p2,p3) ({ __typeof(p1) __p1 = (p1); \
41822 __typeof(p2) __p2 = (p2); \
41823 _Generic( (int (*)[__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41824 int (*)[__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vmulltq_poly_x_p8 (__ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
41825 int (*)[__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vmulltq_poly_x_p16 (__ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3));})
/* Single-type intrinsics below exist only for one element type, so they
   alias the type-specific function directly instead of using _Generic.  */
41827 #define __arm_vrmlaldavhaxq(p0,p1,p2) __arm_vrmlaldavhaxq_s32(p0,p1,p2)
/* vrmlaldavhq: rounding long multiply-accumulate-across-halves;
   dispatch on signed/unsigned 32-bit vector operands.  */
41829 #define __arm_vrmlaldavhq(p0,p1) ({ __typeof(p0) __p0 = (p0); \
41830 __typeof(p1) __p1 = (p1); \
41831 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41832 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrmlaldavhq_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t)), \
41833 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrmlaldavhq_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vrmlaldavhq_p: predicated variant; p2 is the predicate.  */
41835 #define __arm_vrmlaldavhq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41836 __typeof(p1) __p1 = (p1); \
41837 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41838 int (*)[__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrmlaldavhq_p_s32 (__ARM_mve_coerce(__p0, int32x4_t), __ARM_mve_coerce(__p1, int32x4_t), p2), \
41839 int (*)[__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrmlaldavhq_p_u32 (__ARM_mve_coerce(__p0, uint32x4_t), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* s32-only aliases (no _Generic needed).  */
41841 #define __arm_vrmlaldavhxq(p0,p1) __arm_vrmlaldavhxq_s32(p0,p1)
41843 #define __arm_vrmlaldavhxq_p(p0,p1,p2) __arm_vrmlaldavhxq_p_s32(p0,p1,p2)
41845 #define __arm_vrmlsldavhaq(p0,p1,p2) __arm_vrmlsldavhaq_s32(p0,p1,p2)
41847 #define __arm_vrmlsldavhaxq(p0,p1,p2) __arm_vrmlsldavhaxq_s32(p0,p1,p2)
41849 #define __arm_vrmlsldavhq(p0,p1) __arm_vrmlsldavhq_s32(p0,p1)
41851 #define __arm_vrmlsldavhq_p(p0,p1,p2) __arm_vrmlsldavhq_p_s32(p0,p1,p2)
41853 #define __arm_vrmlsldavhxq(p0,p1) __arm_vrmlsldavhxq_s32(p0,p1)
41855 #define __arm_vrmlsldavhxq_p(p0,p1,p2) __arm_vrmlsldavhxq_p_s32(p0,p1,p2)
/* vstrbq: byte store of a vector to memory.  Dispatch keys on the
   destination pointer type and the vector type; wider elements are
   narrowed to bytes by the type-specific intrinsic.
   NOTE(review): this macro keys on raw p0 (no __p0 local is declared),
   unlike vstrbq_p below — p0 is still evaluated only once because only
   the selected _Generic association is evaluated.  */
41857 #define __arm_vstrbq(p0,p1) ({ __typeof(p1) __p1 = (p1); \
41858 _Generic( (int (*)[__ARM_mve_typeid(p0)][__ARM_mve_typeid(__p1)])0, \
41859 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int8x16_t]: __arm_vstrbq_s8 (__ARM_mve_coerce(p0, int8_t *), __ARM_mve_coerce(__p1, int8x16_t)), \
41860 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int16x8_t]: __arm_vstrbq_s16 (__ARM_mve_coerce(p0, int8_t *), __ARM_mve_coerce(__p1, int16x8_t)), \
41861 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrbq_s32 (__ARM_mve_coerce(p0, int8_t *), __ARM_mve_coerce(__p1, int32x4_t)), \
41862 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16_t]: __arm_vstrbq_u8 (__ARM_mve_coerce(p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16_t)), \
41863 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vstrbq_u16 (__ARM_mve_coerce(p0, uint8_t *), __ARM_mve_coerce(__p1, uint16x8_t)), \
41864 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrbq_u32 (__ARM_mve_coerce(p0, uint8_t *), __ARM_mve_coerce(__p1, uint32x4_t)));})
/* vstrbq_p: predicated byte store; p2 is the predicate.  */
41866 #define __arm_vstrbq_p(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41867 __typeof(p1) __p1 = (p1); \
41868 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)])0, \
41869 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int8x16_t]: __arm_vstrbq_p_s8 (__ARM_mve_coerce(__p0, int8_t *), __ARM_mve_coerce(__p1, int8x16_t), p2), \
41870 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int16x8_t]: __arm_vstrbq_p_s16 (__ARM_mve_coerce(__p0, int8_t *), __ARM_mve_coerce(__p1, int16x8_t), p2), \
41871 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_int32x4_t]: __arm_vstrbq_p_s32 (__ARM_mve_coerce(__p0, int8_t *), __ARM_mve_coerce(__p1, int32x4_t), p2), \
41872 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16_t]: __arm_vstrbq_p_u8 (__ARM_mve_coerce(__p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16_t), p2), \
41873 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint16x8_t]: __arm_vstrbq_p_u16 (__ARM_mve_coerce(__p0, uint8_t *), __ARM_mve_coerce(__p1, uint16x8_t), p2), \
41874 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint32x4_t]: __arm_vstrbq_p_u32 (__ARM_mve_coerce(__p0, uint8_t *), __ARM_mve_coerce(__p1, uint32x4_t), p2));})
/* vstrdq_scatter_base: 64-bit scatter store with base-register addressing;
   only the value vector p2 is polymorphic, so p0/p1 pass straight through.  */
41876 #define __arm_vstrdq_scatter_base(p0,p1,p2) ({ __typeof(p2) __p2 = (p2); \
41877 _Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
41878 int (*)[__ARM_mve_type_int64x2_t]: __arm_vstrdq_scatter_base_s64 (p0, p1, __ARM_mve_coerce(__p2, int64x2_t)), \
41879 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vstrdq_scatter_base_u64 (p0, p1, __ARM_mve_coerce(__p2, uint64x2_t)));})
/* vstrdq_scatter_base_p: predicated variant; p3 is the predicate.  */
41881 #define __arm_vstrdq_scatter_base_p(p0,p1,p2,p3) ({ __typeof(p2) __p2 = (p2); \
41882 _Generic( (int (*)[__ARM_mve_typeid(__p2)])0, \
41883 int (*)[__ARM_mve_type_int64x2_t]: __arm_vstrdq_scatter_base_p_s64 (p0, p1, __ARM_mve_coerce(__p2, int64x2_t), p3), \
41884 int (*)[__ARM_mve_type_uint64x2_t]: __arm_vstrdq_scatter_base_p_u64 (p0, p1, __ARM_mve_coerce(__p2, uint64x2_t), p3));})
/* vrmlaldavhaq: rounding long multiply-accumulate-across-halves with
   scalar addend; p0 is classified as int_n and coerced via coerce3.  */
41886 #define __arm_vrmlaldavhaq(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41887 __typeof(p1) __p1 = (p1); \
41888 __typeof(p2) __p2 = (p2); \
41889 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41890 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrmlaldavhaq_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
41891 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrmlaldavhaq_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)));})
/* vrmlaldavhaq_p: predicated variant; p3 is the predicate.  */
41893 #define __arm_vrmlaldavhaq_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
41894 __typeof(p1) __p1 = (p1); \
41895 __typeof(p2) __p2 = (p2); \
41896 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41897 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_int32x4_t][__ARM_mve_type_int32x4_t]: __arm_vrmlaldavhaq_p_s32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, int32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
41898 int (*)[__ARM_mve_type_int_n][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vrmlaldavhaq_p_u32 (__ARM_mve_coerce3(p0, int), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
41900 #define __arm_vstrbq_scatter_offset(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
41901 __typeof(p1) __p1 = (p1); \
41902 __typeof(p2) __p2 = (p2); \
41903 _Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
41904 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vstrbq_scatter_offset_s8 (__ARM_mve_coerce(__p0, int8_t *), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, int8x16_t)), \
41905 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrbq_scatter_offset_s16 (__ARM_mve_coerce(__p0, int8_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t)), \
41906 int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrbq_scatter_offset_s32 (__ARM_mve_coerce(__p0, int8_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t)), \
41907 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vstrbq_scatter_offset_u8 (__ARM_mve_coerce(__p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t)), \
41908 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrbq_scatter_offset_u16 (__ARM_mve_coerce(__p0, uint8_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t)), \
41909 int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrbq_scatter_offset_u32 (__ARM_mve_coerce(__p0, uint8_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t)));})
/* Predicated form of the vstrbq_scatter_offset dispatcher: identical
   type-based selection on p0/p1/p2 (see the unpredicated macro above in
   the generated table), with the predicate p3 forwarded unchanged to the
   chosen _p_* variant.  p3 is not captured in a local because it appears
   once per _Generic branch and only one branch is ever evaluated.  */
#define __arm_vstrbq_scatter_offset_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p1) __p1 = (p1); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p1)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint8x16_t][__ARM_mve_type_int8x16_t]: __arm_vstrbq_scatter_offset_p_s8 (__ARM_mve_coerce(__p0, int8_t *), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, int8x16_t), p3), \
int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_int16x8_t]: __arm_vstrbq_scatter_offset_p_s16 (__ARM_mve_coerce(__p0, int8_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, int16x8_t), p3), \
int (*)[__ARM_mve_type_int8_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_int32x4_t]: __arm_vstrbq_scatter_offset_p_s32 (__ARM_mve_coerce(__p0, int8_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, int32x4_t), p3), \
int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint8x16_t][__ARM_mve_type_uint8x16_t]: __arm_vstrbq_scatter_offset_p_u8 (__ARM_mve_coerce(__p0, uint8_t *), __ARM_mve_coerce(__p1, uint8x16_t), __ARM_mve_coerce(__p2, uint8x16_t), p3), \
int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint16x8_t][__ARM_mve_type_uint16x8_t]: __arm_vstrbq_scatter_offset_p_u16 (__ARM_mve_coerce(__p0, uint8_t *), __ARM_mve_coerce(__p1, uint16x8_t), __ARM_mve_coerce(__p2, uint16x8_t), p3), \
int (*)[__ARM_mve_type_uint8_t_ptr][__ARM_mve_type_uint32x4_t][__ARM_mve_type_uint32x4_t]: __arm_vstrbq_scatter_offset_p_u32 (__ARM_mve_coerce(__p0, uint8_t *), __ARM_mve_coerce(__p1, uint32x4_t), __ARM_mve_coerce(__p2, uint32x4_t), p3));})
/* Predicated 64-bit scatter store with offsets: dispatches on the base
   pointer (p0) and value vector (p2) types only.  The offset vector p1
   and predicate p3 are forwarded directly without __typeof capture —
   their types do not participate in selection, and each is evaluated
   exactly once since only one _Generic branch survives.  */
#define __arm_vstrdq_scatter_offset_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int64_t_ptr][__ARM_mve_type_int64x2_t]: __arm_vstrdq_scatter_offset_p_s64 (__ARM_mve_coerce(__p0, int64_t *), p1, __ARM_mve_coerce(__p2, int64x2_t), p3), \
int (*)[__ARM_mve_type_uint64_t_ptr][__ARM_mve_type_uint64x2_t]: __arm_vstrdq_scatter_offset_p_u64 (__ARM_mve_coerce(__p0, uint64_t *), p1, __ARM_mve_coerce(__p2, uint64x2_t), p3));})
/* 64-bit scatter store with offsets: chooses the signed or unsigned
   variant from the base-pointer (p0) and value-vector (p2) types.  The
   offset vector p1 is passed through unmodified (its type is not used
   for selection), and only the selected branch evaluates it.  */
#define __arm_vstrdq_scatter_offset(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int64_t_ptr][__ARM_mve_type_int64x2_t]: __arm_vstrdq_scatter_offset_s64 (__ARM_mve_coerce(__p0, int64_t *), p1, __ARM_mve_coerce(__p2, int64x2_t)), \
int (*)[__ARM_mve_type_uint64_t_ptr][__ARM_mve_type_uint64x2_t]: __arm_vstrdq_scatter_offset_u64 (__ARM_mve_coerce(__p0, uint64_t *), p1, __ARM_mve_coerce(__p2, uint64x2_t)));})
/* Predicated 64-bit scatter store with shifted (element-scaled) offsets:
   same dispatch shape as the unshifted form — selection on p0 (base
   pointer) and p2 (value vector) types only; the offset vector p1 and
   predicate p3 are forwarded as-is to the chosen _s64/_u64 variant.  */
#define __arm_vstrdq_scatter_shifted_offset_p(p0,p1,p2,p3) ({ __typeof(p0) __p0 = (p0); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int64_t_ptr][__ARM_mve_type_int64x2_t]: __arm_vstrdq_scatter_shifted_offset_p_s64 (__ARM_mve_coerce(__p0, int64_t *), p1, __ARM_mve_coerce(__p2, int64x2_t), p3), \
int (*)[__ARM_mve_type_uint64_t_ptr][__ARM_mve_type_uint64x2_t]: __arm_vstrdq_scatter_shifted_offset_p_u64 (__ARM_mve_coerce(__p0, uint64_t *), p1, __ARM_mve_coerce(__p2, uint64x2_t), p3));})
/* 64-bit scatter store with shifted (element-scaled) offsets: picks the
   signed or unsigned variant from the p0/p2 types via _Generic.  The
   offset vector p1 is not captured because its type does not drive
   selection; it is evaluated once in the single surviving branch.  */
#define __arm_vstrdq_scatter_shifted_offset(p0,p1,p2) ({ __typeof(p0) __p0 = (p0); \
__typeof(p2) __p2 = (p2); \
_Generic( (int (*)[__ARM_mve_typeid(__p0)][__ARM_mve_typeid(__p2)])0, \
int (*)[__ARM_mve_type_int64_t_ptr][__ARM_mve_type_int64x2_t]: __arm_vstrdq_scatter_shifted_offset_s64 (__ARM_mve_coerce(__p0, int64_t *), p1, __ARM_mve_coerce(__p2, int64x2_t)), \
int (*)[__ARM_mve_type_uint64_t_ptr][__ARM_mve_type_uint64x2_t]: __arm_vstrdq_scatter_shifted_offset_u64 (__ARM_mve_coerce(__p0, uint64_t *), p1, __ARM_mve_coerce(__p2, uint64x2_t)));})
41946 #endif /* __cplusplus */
41947 #endif /* __ARM_FEATURE_MVE */
41948 #endif /* _GCC_ARM_MVE_H. */