]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/i386/mm3dnow.h
tmmintrin.h (_mm_alignr_epi32): Implement as always inlined function, not as a macro.
[thirdparty/gcc.git] / gcc / config / i386 / mm3dnow.h
1 /* Copyright (C) 2004 Free Software Foundation, Inc.
2
3 This file is part of GCC.
4
5 GCC is free software; you can redistribute it and/or modify
6 it under the terms of the GNU General Public License as published by
7 the Free Software Foundation; either version 2, or (at your option)
8 any later version.
9
10 GCC is distributed in the hope that it will be useful,
11 but WITHOUT ANY WARRANTY; without even the implied warranty of
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 GNU General Public License for more details.
14
15 You should have received a copy of the GNU General Public License
16 along with GCC; see the file COPYING. If not, write to
17 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
18 Boston, MA 02110-1301, USA. */
19
20 /* As a special exception, if you include this header file into source
21 files compiled by GCC, this header file does not by itself cause
22 the resulting executable to be covered by the GNU General Public
23 License. This exception does not however invalidate any other
24 reasons why the executable file might be covered by the GNU General
25 Public License. */
26
27 /* Implemented from the mm3dnow.h (of supposedly AMD origin) included with
28 MSVC 7.1. */
29
30 #ifndef _MM3DNOW_H_INCLUDED
31 #define _MM3DNOW_H_INCLUDED
32
33 #ifdef __3dNOW__
34
35 #include <mmintrin.h>
36
/* Internal data types for implementing the intrinsics.  __v2sf is a
   vector of two single-precision floats (8 bytes total), matching the
   64-bit layout of __m64 so the two types can be freely cast.  */
typedef float __v2sf __attribute__ ((__vector_size__ (8)));
39
/* FEMMS: fast clear of the MMX state, re-enabling x87 FP use.  Call
   after a block of 3DNow!/MMX code before executing x87 instructions.  */
static __inline void __attribute__((__always_inline__))
_m_femms (void)
{
  __builtin_ia32_femms();
}
45
46 static __inline __m64 __attribute__((__always_inline__))
47 _m_pavgusb (__m64 __A, __m64 __B)
48 {
49 return (__m64)__builtin_ia32_pavgusb ((__v8qi)__A, (__v8qi)__B);
50 }
51
52 static __inline __m64 __attribute__((__always_inline__))
53 _m_pf2id (__m64 __A)
54 {
55 return (__m64)__builtin_ia32_pf2id ((__v2sf)__A);
56 }
57
58 static __inline __m64 __attribute__((__always_inline__))
59 _m_pfacc (__m64 __A, __m64 __B)
60 {
61 return (__m64)__builtin_ia32_pfacc ((__v2sf)__A, (__v2sf)__B);
62 }
63
64 static __inline __m64 __attribute__((__always_inline__))
65 _m_pfadd (__m64 __A, __m64 __B)
66 {
67 return (__m64)__builtin_ia32_pfadd ((__v2sf)__A, (__v2sf)__B);
68 }
69
70 static __inline __m64 __attribute__((__always_inline__))
71 _m_pfcmpeq (__m64 __A, __m64 __B)
72 {
73 return (__m64)__builtin_ia32_pfcmpeq ((__v2sf)__A, (__v2sf)__B);
74 }
75
76 static __inline __m64 __attribute__((__always_inline__))
77 _m_pfcmpge (__m64 __A, __m64 __B)
78 {
79 return (__m64)__builtin_ia32_pfcmpge ((__v2sf)__A, (__v2sf)__B);
80 }
81
82 static __inline __m64 __attribute__((__always_inline__))
83 _m_pfcmpgt (__m64 __A, __m64 __B)
84 {
85 return (__m64)__builtin_ia32_pfcmpgt ((__v2sf)__A, (__v2sf)__B);
86 }
87
88 static __inline __m64 __attribute__((__always_inline__))
89 _m_pfmax (__m64 __A, __m64 __B)
90 {
91 return (__m64)__builtin_ia32_pfmax ((__v2sf)__A, (__v2sf)__B);
92 }
93
94 static __inline __m64 __attribute__((__always_inline__))
95 _m_pfmin (__m64 __A, __m64 __B)
96 {
97 return (__m64)__builtin_ia32_pfmin ((__v2sf)__A, (__v2sf)__B);
98 }
99
100 static __inline __m64 __attribute__((__always_inline__))
101 _m_pfmul (__m64 __A, __m64 __B)
102 {
103 return (__m64)__builtin_ia32_pfmul ((__v2sf)__A, (__v2sf)__B);
104 }
105
106 static __inline __m64 __attribute__((__always_inline__))
107 _m_pfrcp (__m64 __A)
108 {
109 return (__m64)__builtin_ia32_pfrcp ((__v2sf)__A);
110 }
111
112 static __inline __m64 __attribute__((__always_inline__))
113 _m_pfrcpit1 (__m64 __A, __m64 __B)
114 {
115 return (__m64)__builtin_ia32_pfrcpit1 ((__v2sf)__A, (__v2sf)__B);
116 }
117
118 static __inline __m64 __attribute__((__always_inline__))
119 _m_pfrcpit2 (__m64 __A, __m64 __B)
120 {
121 return (__m64)__builtin_ia32_pfrcpit2 ((__v2sf)__A, (__v2sf)__B);
122 }
123
124 static __inline __m64 __attribute__((__always_inline__))
125 _m_pfrsqrt (__m64 __A)
126 {
127 return (__m64)__builtin_ia32_pfrsqrt ((__v2sf)__A);
128 }
129
130 static __inline __m64 __attribute__((__always_inline__))
131 _m_pfrsqit1 (__m64 __A, __m64 __B)
132 {
133 return (__m64)__builtin_ia32_pfrsqit1 ((__v2sf)__A, (__v2sf)__B);
134 }
135
136 static __inline __m64 __attribute__((__always_inline__))
137 _m_pfsub (__m64 __A, __m64 __B)
138 {
139 return (__m64)__builtin_ia32_pfsub ((__v2sf)__A, (__v2sf)__B);
140 }
141
142 static __inline __m64 __attribute__((__always_inline__))
143 _m_pfsubr (__m64 __A, __m64 __B)
144 {
145 return (__m64)__builtin_ia32_pfsubr ((__v2sf)__A, (__v2sf)__B);
146 }
147
148 static __inline __m64 __attribute__((__always_inline__))
149 _m_pi2fd (__m64 __A)
150 {
151 return (__m64)__builtin_ia32_pi2fd ((__v2si)__A);
152 }
153
154 static __inline __m64 __attribute__((__always_inline__))
155 _m_pmulhrw (__m64 __A, __m64 __B)
156 {
157 return (__m64)__builtin_ia32_pmulhrw ((__v4hi)__A, (__v4hi)__B);
158 }
159
/* PREFETCH: hint that the cache line containing *__P will soon be
   read.  Second builtin argument 0 = read, third 3 = high temporal
   locality (all cache levels, i.e. _MM_HINT_T0).  */
static __inline void __attribute__((__always_inline__))
_m_prefetch (void *__P)
{
  __builtin_prefetch (__P, 0, 3 /* _MM_HINT_T0 */);
}
165
/* PREFETCHW: hint that the cache line containing *__P will soon be
   written.  Second builtin argument 1 = write, third 3 = high temporal
   locality (all cache levels, i.e. _MM_HINT_T0).  */
static __inline void __attribute__((__always_inline__))
_m_prefetchw (void *__P)
{
  __builtin_prefetch (__P, 1, 3 /* _MM_HINT_T0 */);
}
171
/* Build an __m64 whose low float is __A and whose high float is
   zero.  */
static __inline __m64 __attribute__((__always_inline__))
_m_from_float (float __A)
{
  __v2sf __tmp = { __A, 0.0f };
  return (__m64)__tmp;
}
177
/* Extract the low single-precision float from __A.  The union
   performs the vector-to-scalar type pun without aliasing issues.  */
static __inline float __attribute__((__always_inline__))
_m_to_float (__m64 __A)
{
  union { __v2sf __v; float __a[2]; } __u;
  __u.__v = (__v2sf)__A;
  return __u.__a[0];
}
185
186 #ifdef __3dNOW_A__
187
188 static __inline __m64 __attribute__((__always_inline__))
189 _m_pf2iw (__m64 __A)
190 {
191 return (__m64)__builtin_ia32_pf2iw ((__v2sf)__A);
192 }
193
194 static __inline __m64 __attribute__((__always_inline__))
195 _m_pfnacc (__m64 __A, __m64 __B)
196 {
197 return (__m64)__builtin_ia32_pfnacc ((__v2sf)__A, (__v2sf)__B);
198 }
199
200 static __inline __m64 __attribute__((__always_inline__))
201 _m_pfpnacc (__m64 __A, __m64 __B)
202 {
203 return (__m64)__builtin_ia32_pfpnacc ((__v2sf)__A, (__v2sf)__B);
204 }
205
206 static __inline __m64 __attribute__((__always_inline__))
207 _m_pi2fw (__m64 __A)
208 {
209 return (__m64)__builtin_ia32_pi2fw ((__v2si)__A);
210 }
211
212 static __inline __m64 __attribute__((__always_inline__))
213 _m_pswapd (__m64 __A)
214 {
215 return (__m64)__builtin_ia32_pswapdsf ((__v2sf)__A);
216 }
217
218 #endif /* __3dNOW_A__ */
219 #endif /* __3dNOW__ */
220
221 #endif /* _MM3DNOW_H_INCLUDED */