From 5cd7af016d8587ff53b20ba259746f97edbddbf7 Mon Sep 17 00:00:00 2001
From: "H.J. Lu"
Date: Sun, 3 Apr 2016 14:32:20 -0700
Subject: [PATCH] Don't put SSE2/AVX/AVX512 memmove/memset in ld.so

Since memmove and memset in ld.so don't use IFUNC, don't put SSE2,
AVX and AVX512 memmove and memset in ld.so.

	* sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S: Skip
	if not in libc.
	* sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S:
	Likewise.
	* sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S:
	Likewise.
	* sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S:
	Likewise.
---
 ChangeLog                                     | 11 +++++++++++
 .../multiarch/memmove-avx-unaligned-erms.S    | 16 +++++++++-------
 .../multiarch/memmove-avx512-unaligned-erms.S |  2 +-
 .../multiarch/memmove-sse2-unaligned-erms.S   | 16 +++++++++-------
 .../multiarch/memset-avx2-unaligned-erms.S    | 18 ++++++++++--------
 .../multiarch/memset-avx512-unaligned-erms.S  |  2 +-
 .../multiarch/memset-sse2-unaligned-erms.S    | 18 ++++++++++--------
 7 files changed, 51 insertions(+), 32 deletions(-)

diff --git a/ChangeLog b/ChangeLog
index e93b7bf3b1..befaa41a66 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,14 @@
+2016-04-03  H.J. Lu
+
+	* sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S: Skip
+	if not in libc.
+	* sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S:
+	Likewise.
+	* sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S:
+	Likewise.
+	* sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S:
+	Likewise.
+
 2016-04-03  H.J. Lu
 
 	* sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:
diff --git a/sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S
index 3a72c7eafd..44711c37ca 100644
--- a/sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S
@@ -1,9 +1,11 @@
-#define VEC_SIZE 32
-#define VEC(i) ymm##i
-#define VMOVU vmovdqu
-#define VMOVA vmovdqa
+#if IS_IN (libc)
+# define VEC_SIZE 32
+# define VEC(i) ymm##i
+# define VMOVU vmovdqu
+# define VMOVA vmovdqa
 
-#define SECTION(p) p##.avx
-#define MEMMOVE_SYMBOL(p,s) p##_avx_##s
+# define SECTION(p) p##.avx
+# define MEMMOVE_SYMBOL(p,s) p##_avx_##s
 
-#include "memmove-vec-unaligned-erms.S"
+# include "memmove-vec-unaligned-erms.S"
+#endif
diff --git a/sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S
index 38358fa37c..c2c52937bf 100644
--- a/sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S
@@ -1,4 +1,4 @@
-#ifdef HAVE_AVX512_ASM_SUPPORT
+#if defined HAVE_AVX512_ASM_SUPPORT && IS_IN (libc)
 # define VEC_SIZE 64
 # define VEC(i) zmm##i
 # define VMOVU vmovdqu64
diff --git a/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
index 52b9ae08fc..85214fe725 100644
--- a/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
@@ -1,9 +1,11 @@
-#define VEC_SIZE 16
-#define VEC(i) xmm##i
-#define VMOVU movdqu
-#define VMOVA movdqa
+#if IS_IN (libc)
+# define VEC_SIZE 16
+# define VEC(i) xmm##i
+# define VMOVU movdqu
+# define VMOVA movdqa
 
-#define SECTION(p) p
-#define MEMMOVE_SYMBOL(p,s) p##_sse2_##s
+# define SECTION(p) p
+# define MEMMOVE_SYMBOL(p,s) p##_sse2_##s
 
-#include "memmove-vec-unaligned-erms.S"
+# include "memmove-vec-unaligned-erms.S"
+#endif
diff --git a/sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S
index e0dc56512e..79975e0825 100644
--- a/sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S
@@ -1,14 +1,16 @@
-#define VEC_SIZE 32
-#define VEC(i) ymm##i
-#define VMOVU vmovdqu
-#define VMOVA vmovdqa
+#if IS_IN (libc)
+# define VEC_SIZE 32
+# define VEC(i) ymm##i
+# define VMOVU vmovdqu
+# define VMOVA vmovdqa
 
-#define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
+# define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
   vmovd d, %xmm0; \
   movq r, %rax; \
   vpbroadcastb %xmm0, %ymm0
 
-#define SECTION(p) p##.avx
-#define MEMSET_SYMBOL(p,s) p##_avx2_##s
+# define SECTION(p) p##.avx
+# define MEMSET_SYMBOL(p,s) p##_avx2_##s
 
-#include "memset-vec-unaligned-erms.S"
+# include "memset-vec-unaligned-erms.S"
+#endif
diff --git a/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
index 72f4095831..f1b3cb23d3 100644
--- a/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
@@ -1,4 +1,4 @@
-#ifdef HAVE_AVX512_ASM_SUPPORT
+#if defined HAVE_AVX512_ASM_SUPPORT && IS_IN (libc)
 # define VEC_SIZE 64
 # define VEC(i) zmm##i
 # define VMOVU vmovdqu64
diff --git a/sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S
index 437a858dab..2deba42c16 100644
--- a/sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S
@@ -1,16 +1,18 @@
-#define VEC_SIZE 16
-#define VEC(i) xmm##i
-#define VMOVU movdqu
-#define VMOVA movdqa
+#if IS_IN (libc)
+# define VEC_SIZE 16
+# define VEC(i) xmm##i
+# define VMOVU movdqu
+# define VMOVA movdqa
 
-#define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
+# define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
   movd d, %xmm0; \
  movq r, %rax; \
  punpcklbw %xmm0, %xmm0; \
  punpcklwd %xmm0, %xmm0; \
  pshufd $0, %xmm0, %xmm0
 
-#define SECTION(p) p
-#define MEMSET_SYMBOL(p,s) p##_sse2_##s
+# define SECTION(p) p
+# define MEMSET_SYMBOL(p,s) p##_sse2_##s
 
-#include "memset-vec-unaligned-erms.S"
+# include "memset-vec-unaligned-erms.S"
+#endif
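For readers unfamiliar with the guard this patch adds: in glibc, IS_IN (libc) is a build-time check that compares the module a source file is being compiled for against libc, so wrapping a whole implementation in #if IS_IN (libc) makes the file compile to nothing when it is built for ld.so (the rtld module), which is why the SSE2/AVX/AVX512 memmove/memset variants drop out of ld.so here.  The C program below is only an illustrative sketch, not glibc source; MODULE_libc, MODULE_rtld and the IN_MODULE default are stand-ins for values the glibc build system normally passes on the compiler command line.

/* Standalone sketch of the IS_IN idea.  Build with e.g.
     cc demo.c                            (pretend we are building libc)
     cc -DIN_MODULE=MODULE_rtld demo.c    (pretend we are building ld.so)  */
#include <stdio.h>

#define MODULE_libc 1
#define MODULE_rtld 2

#ifndef IN_MODULE
# define IN_MODULE MODULE_libc  /* assumption: default to the libc build */
#endif

#define IS_IN(lib) (IN_MODULE == MODULE_##lib)

#if IS_IN (libc)
/* In the real patch, the SSE2/AVX/AVX512 memmove and memset code sits
   inside a block like this one, so it is assembled only for libc.  */
static const char *result = "multiarch memmove/memset variants built";
#else
static const char *result = "variants skipped: not building libc (e.g. ld.so)";
#endif

int
main (void)
{
  puts (result);
  return 0;
}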