From: Oliver Kurth
Date: Mon, 17 Jun 2019 18:41:37 +0000 (-0700)
Subject: Changes to common header files not applicable to open-vm-tools.
X-Git-Tag: stable-11.0.0~41
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=d2213fb32a1357305e72cbd6856f5e5ea9e151da;p=thirdparty%2Fopen-vm-tools.git

Changes to common header files not applicable to open-vm-tools.

Remove checks for GCC 4.1 in xsave, xrstor, etc. instructions
---

diff --git a/open-vm-tools/lib/include/vm_basic_asm_x86.h b/open-vm-tools/lib/include/vm_basic_asm_x86.h
index f3102e3f9..167f9815e 100644
--- a/open-vm-tools/lib/include/vm_basic_asm_x86.h
+++ b/open-vm-tools/lib/include/vm_basic_asm_x86.h
@@ -1,5 +1,5 @@
 /*********************************************************
- * Copyright (C) 1998-2018 VMware, Inc. All rights reserved.
+ * Copyright (C) 1998-2019 VMware, Inc. All rights reserved.
  *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU Lesser General Public License as published
@@ -158,49 +158,32 @@ FXRSTOR_AMD_ES0(const void *load)
 static INLINE void
 XSAVE_ES1(void *save, uint64 mask)
 {
-#if __GNUC__ < 4 || __GNUC__ == 4 && __GNUC_MINOR__ == 1
-   __asm__ __volatile__ (
-        ".byte 0x0f, 0xae, 0x21 \n"
-        :
-        : "c" ((uint8 *)save), "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
-        : "memory");
-#else
    __asm__ __volatile__ (
        "xsave %0 \n"
        : "=m" (*(uint8 *)save)
        : "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
        : "memory");
-#endif
 }
 
 static INLINE void
 XSAVEOPT_ES1(void *save, uint64 mask)
 {
    __asm__ __volatile__ (
-        ".byte 0x0f, 0xae, 0x31 \n"
-        :
-        : "c" ((uint8 *)save), "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
+       "xsaveopt %0 \n"
+       : "=m" (*(uint8 *)save)
+       : "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
        : "memory");
 }
 
 static INLINE void
 XRSTOR_ES1(const void *load, uint64 mask)
 {
-#if __GNUC__ < 4 || __GNUC__ == 4 && __GNUC_MINOR__ == 1
-   __asm__ __volatile__ (
-        ".byte 0x0f, 0xae, 0x29 \n"
-        :
-        : "c" ((const uint8 *)load),
-          "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
-        : "memory");
-#else
    __asm__ __volatile__ (
        "xrstor %0 \n"
        :
        : "m" (*(const uint8 *)load),
          "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
        : "memory");
-#endif
 }
 
 static INLINE void
@@ -218,17 +201,10 @@ XRSTOR_AMD_ES0(const void *load, uint64 mask)
        "fildl %0         \n"    // Dummy Load from "safe address" changes all
                                 // x87 exception pointers.
        "mov %%ebx, %%eax \n"
-#if __GNUC__ < 4 || __GNUC__ == 4 && __GNUC_MINOR__ == 1
-        ".byte 0x0f, 0xae, 0x29 \n"
-        :
-        : "m" (dummy), "c" ((const uint8 *)load),
-          "b" ((uint32)mask), "d" ((uint32)(mask >> 32))
-#else
        "xrstor %1 \n"
        :
        : "m" (dummy), "m" (*(const uint8 *)load),
         "b" ((uint32)mask), "d" ((uint32)(mask >> 32))
-#endif
        : "eax", "memory");
 }
 #endif /* __GNUC__ */
diff --git a/open-vm-tools/lib/include/vm_basic_asm_x86_64.h b/open-vm-tools/lib/include/vm_basic_asm_x86_64.h
index 55d88d642..d0be222d8 100644
--- a/open-vm-tools/lib/include/vm_basic_asm_x86_64.h
+++ b/open-vm-tools/lib/include/vm_basic_asm_x86_64.h
@@ -1,5 +1,5 @@
 /*********************************************************
- * Copyright (C) 1998-2018 VMware, Inc. All rights reserved.
+ * Copyright (C) 1998-2019 VMware, Inc. All rights reserved.
  *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU Lesser General Public License as published
@@ -189,87 +189,53 @@ FXRSTOR_AMD_ES0(const void *load)
 static INLINE void
 XSAVE_ES1(void *save, uint64 mask)
 {
-#if __GNUC__ < 4 || __GNUC__ == 4 && __GNUC_MINOR__ == 1
-   __asm__ __volatile__ (
-        ".byte 0x48, 0x0f, 0xae, 0x21 \n"
-        :
-        : "c" ((uint8 *)save), "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
-        : "memory");
-#else
    __asm__ __volatile__ (
        "xsaveq %0 \n"
       : "=m" (*(uint8 *)save)
       : "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
       : "memory");
-#endif
 }
 
 static INLINE void
 XSAVE_COMPAT_ES1(void *save, uint64 mask)
 {
-#if __GNUC__ < 4 || __GNUC__ == 4 && __GNUC_MINOR__ == 1
-   __asm__ __volatile__ (
-        ".byte 0x0f, 0xae, 0x21 \n"
-        :
-        : "c" ((uint8 *)save), "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
-        : "memory");
-#else
    __asm__ __volatile__ (
        "xsave %0 \n"
       : "=m" (*(uint8 *)save)
       : "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
       : "memory");
-#endif
 }
 
 static INLINE void
 XSAVEOPT_ES1(void *save, uint64 mask)
 {
    __asm__ __volatile__ (
-        ".byte 0x48, 0x0f, 0xae, 0x31 \n"
-        :
-        : "c" ((uint8 *)save), "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
+       "xsaveoptq %0 \n"
+       : "=m" (*(uint8 *)save)
+       : "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
        : "memory");
 }
 
 static INLINE void
 XRSTOR_ES1(const void *load, uint64 mask)
 {
-#if __GNUC__ < 4 || __GNUC__ == 4 && __GNUC_MINOR__ == 1
-   __asm__ __volatile__ (
-        ".byte 0x48, 0x0f, 0xae, 0x29 \n"
-        :
-        : "c" ((const uint8 *)load),
-          "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
-        : "memory");
-#else
    __asm__ __volatile__ (
        "xrstorq %0 \n"
        :
        : "m" (*(const uint8 *)load),
          "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
        : "memory");
-#endif
 }
 
 static INLINE void
 XRSTOR_COMPAT_ES1(const void *load, uint64 mask)
 {
-#if __GNUC__ < 4 || __GNUC__ == 4 && __GNUC_MINOR__ == 1
-   __asm__ __volatile__ (
-        ".byte 0x0f, 0xae, 0x29 \n"
-        :
-        : "c" ((const uint8 *)load),
-          "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
-        : "memory");
-#else
    __asm__ __volatile__ (
        "xrstor %0 \n"
        :
        : "m" (*(const uint8 *)load),
          "a" ((uint32)mask), "d" ((uint32)(mask >> 32))
        : "memory");
-#endif
 }
 
 static INLINE void
@@ -287,17 +253,10 @@ XRSTOR_AMD_ES0(const void *load, uint64 mask)
        "fildl %0         \n"    // Dummy Load from "safe address" changes all
                                 // x87 exception pointers.
        "mov %%ebx, %%eax \n"
-#if __GNUC__ < 4 || __GNUC__ == 4 && __GNUC_MINOR__ == 1
-        ".byte 0x48, 0x0f, 0xae, 0x29 \n"
-        :
-        : "m" (dummy), "c" ((const uint8 *)load),
-          "b" ((uint32)mask), "d" ((uint32)(mask >> 32))
-#else
        "xrstorq %1 \n"
        :
        : "m" (dummy), "m" (*(const uint8 *)load),
        "b" ((uint32)mask), "d" ((uint32)(mask >> 32))
-#endif
        : "eax", "memory");
 }
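For reference, a minimal standalone sketch (not part of this patch) of the mnemonic-based inline assembly that replaces the old .byte encodings above. It assumes an x86-64 Linux host whose CPU and OS have XSAVE enabled (OSXSAVE set), a C11 toolchain for aligned_alloc, and an illustrative 4 KB save area and mask; buffer sizes and the mask value are assumptions, not values taken from the patch.

/* Sketch only: exercises the "xsave"/"xrstor" mnemonics used by the patch.
 * Assumes x86-64 Linux with XSAVE enabled by the OS; mask 0x3 requests
 * only x87 + SSE state, which is always permitted when OSXSAVE is set. */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int
main(void)
{
   uint64_t mask = 0x3;                     /* x87 + SSE state only */
   uint32_t lo = (uint32_t)mask;
   uint32_t hi = (uint32_t)(mask >> 32);
   uint64_t xstateBV;
   /* XSAVE requires a 64-byte-aligned save area; 4096 bytes comfortably
    * covers the legacy region (512 bytes) plus the XSAVE header (64 bytes). */
   uint8_t *area = aligned_alloc(64, 4096);

   if (area == NULL) {
      return 1;
   }
   memset(area, 0, 4096);

   __asm__ __volatile__ ("xsave %0 \n"      /* mnemonic form, as in the patch */
                         : "=m" (*area)
                         : "a" (lo), "d" (hi)
                         : "memory");
   __asm__ __volatile__ ("xrstor %0 \n"
                         :
                         : "m" (*area), "a" (lo), "d" (hi)
                         : "memory");

   /* XSTATE_BV sits at offset 512, the start of the XSAVE header. */
   memcpy(&xstateBV, area + 512, sizeof xstateBV);
   printf("XSTATE_BV after xsave: 0x%llx\n", (unsigned long long)xstateBV);
   free(area);
   return 0;
}

Because the assembler, not the compiler, encodes the mnemonics, no -mxsave flag is needed for this inline-asm form; any binutils new enough to know the xsave family assembles it, which is the premise behind dropping the GCC 4.1 .byte fallbacks.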