/* Copyright (C) 2012-2024 Free Software Foundation, Inc.
   Contributed by Richard Henderson <rth@redhat.com>.

   This file is part of the GNU Atomic Library (libatomic).

   Libatomic is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   Libatomic is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
   FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
   more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */

#include "libatomic_i.h"

#if HAVE_IFUNC

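/* Cached copy of the CPUID leaf-1 feature bits, shared with the ifunc
   resolvers in this directory's host-config.h.  Zero means "not yet
   initialized"; a CPU that genuinely reports no interesting feature
   bits merely pays for a re-run of the (idempotent) initializer.  */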
unsigned int __libat_feat1;

unsigned int
__libat_feat1_init (void)
{
  unsigned int eax, ebx, ecx, edx;
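  /* FEAT1_REGISTER is a macro provided by the x86 host-config.h naming
     whichever of the four registers above carries the feature bits this
     target cares about (the leaf-1 ECX bits on x86_64, EDX on i386),
     so the __get_cpuid call below fills it in directly.  */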
  FEAT1_REGISTER = 0;
  __get_cpuid (1, &eax, &ebx, &ecx, &edx);
#ifdef __x86_64__
  if ((FEAT1_REGISTER & (bit_AVX | bit_CMPXCHG16B))
      == (bit_AVX | bit_CMPXCHG16B))
    {
      /* The Intel SDM guarantees that a 16-byte VMOVDQA on a 16-byte
         aligned address is atomic, and AMD is expected to provide a
         similar guarantee soon.  We don't have such a guarantee from
         vendors of other CPUs with AVX, like Zhaoxin and VIA.  */
      unsigned int ecx2 = 0;
      __get_cpuid (0, &eax, &ebx, &ecx2, &edx);
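      /* Leaf 0 returns the CPU vendor string in EBX:EDX:ECX;
         signature_INTEL_ecx and signature_AMD_ecx (from <cpuid.h>) are
         the ECX words of "GenuineIntel" and "AuthenticAMD", so checking
         ECX alone is enough to tell these two vendors apart.  */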
      if (ecx2 != signature_INTEL_ecx && ecx2 != signature_AMD_ecx)
        FEAT1_REGISTER &= ~bit_AVX;
    }
#endif
  /* See the load in load_feat1.  */
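  /* A relaxed store is sufficient here: the value is derived entirely
     from CPUID, so any racing initializations write identical bits, and
     a reader that still observes 0 just calls this function itself.  */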
  __atomic_store_n (&__libat_feat1, FEAT1_REGISTER, __ATOMIC_RELAXED);
  return FEAT1_REGISTER;
}

#endif /* HAVE_IFUNC */
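
/* Illustrative sketch (not part of the upstream file): one way the
   cached feature word can be consumed on the ifunc side.  The names
   load_feat1 and IFUNC_COND_1 follow the pattern used by libatomic's
   x86 host-config.h, but treat the exact definitions below as
   assumptions rather than the authoritative upstream code.  */
#if 0
#include <cpuid.h>		/* bit_AVX, bit_CMPXCHG16B.  */

extern unsigned int __libat_feat1;
extern unsigned int __libat_feat1_init (void);

static inline unsigned int
load_feat1 (void)
{
  /* Pairs with the relaxed store in __libat_feat1_init.  */
  unsigned int feat1 = __atomic_load_n (&__libat_feat1, __ATOMIC_RELAXED);
  if (feat1 == 0)
    /* Not initialized yet; compute and cache the feature word now.  */
    feat1 = __libat_feat1_init ();
  return feat1;
}

/* An ifunc resolver would then select the 16-byte VMOVDQA-based entry
   points only when both bits survived the vendor filtering above.  */
#define IFUNC_COND_1 \
  ((load_feat1 () & (bit_AVX | bit_CMPXCHG16B)) \
   == (bit_AVX | bit_CMPXCHG16B))
#endif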