Don't put SSE2/AVX/AVX512 memmove/memset in ld.so
Since memmove and memset in ld.so don't use IFUNC, don't put SSE2, AVX
and AVX512 memmove and memset in ld.so.

	* sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S: Skip
	if not in libc.
	* sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S:
	Likewise.
	* sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S:
	Likewise.
	* sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S:
	Likewise.
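Every file touched below follows the same pattern: the whole SSE2/AVX/AVX512 implementation is wrapped in an IS_IN (libc) preprocessor guard, so the optimized variant is assembled only when building libc proper and the file assembles to nothing for ld.so, which sticks with the default memmove/memset since it cannot use IFUNC. A minimal sketch of the resulting guard, shown here with the AVX memmove file's macros purely for illustration (the authoritative versions are the diffs below):

#if IS_IN (libc)		/* Assemble only when building libc, not ld.so.  */
# define VEC_SIZE 32		/* 32-byte ymm vectors.  */
# define VEC(i) ymm##i
# define VMOVU vmovdqu		/* Unaligned vector load/store.  */
# define VMOVA vmovdqa		/* Aligned vector load/store.  */
# define SECTION(p) p##.avx
# define MEMMOVE_SYMBOL(p,s) p##_avx_##s
# include "memmove-vec-unaligned-erms.S"	/* Shared template parameterized by the macros above.  */
#endif				/* Outside libc this file produces no code.  */

For the AVX512 files the new condition is folded into the existing assembler-support check, giving #if defined HAVE_AVX512_ASM_SUPPORT && IS_IN (libc).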
H.J. Lu committed Apr 3, 2016
1 parent ea2785e commit 5cd7af0
Showing 7 changed files with 51 additions and 32 deletions.
11 changes: 11 additions & 0 deletions ChangeLog
@@ -1,3 +1,14 @@
+2016-04-03  H.J. Lu  <hongjiu.lu@intel.com>
+
+	* sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S: Skip
+	if not in libc.
+	* sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S:
+	Likewise.
+	* sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S:
+	Likewise.
+	* sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S:
+	Likewise.
+
 2016-04-03  H.J. Lu  <hongjiu.lu@intel.com>
 
 	* sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:
16 changes: 9 additions & 7 deletions sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S
@@ -1,9 +1,11 @@
-#define VEC_SIZE 32
-#define VEC(i) ymm##i
-#define VMOVU vmovdqu
-#define VMOVA vmovdqa
+#if IS_IN (libc)
+# define VEC_SIZE 32
+# define VEC(i) ymm##i
+# define VMOVU vmovdqu
+# define VMOVA vmovdqa
 
-#define SECTION(p) p##.avx
-#define MEMMOVE_SYMBOL(p,s) p##_avx_##s
+# define SECTION(p) p##.avx
+# define MEMMOVE_SYMBOL(p,s) p##_avx_##s
 
-#include "memmove-vec-unaligned-erms.S"
+# include "memmove-vec-unaligned-erms.S"
+#endif
2 changes: 1 addition & 1 deletion sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S
@@ -1,4 +1,4 @@
-#ifdef HAVE_AVX512_ASM_SUPPORT
+#if defined HAVE_AVX512_ASM_SUPPORT && IS_IN (libc)
 # define VEC_SIZE 64
 # define VEC(i) zmm##i
 # define VMOVU vmovdqu64
16 changes: 9 additions & 7 deletions sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
@@ -1,9 +1,11 @@
-#define VEC_SIZE 16
-#define VEC(i) xmm##i
-#define VMOVU movdqu
-#define VMOVA movdqa
+#if IS_IN (libc)
+# define VEC_SIZE 16
+# define VEC(i) xmm##i
+# define VMOVU movdqu
+# define VMOVA movdqa
 
-#define SECTION(p) p
-#define MEMMOVE_SYMBOL(p,s) p##_sse2_##s
+# define SECTION(p) p
+# define MEMMOVE_SYMBOL(p,s) p##_sse2_##s
 
-#include "memmove-vec-unaligned-erms.S"
+# include "memmove-vec-unaligned-erms.S"
+#endif
18 changes: 10 additions & 8 deletions sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S
@@ -1,14 +1,16 @@
-#define VEC_SIZE 32
-#define VEC(i) ymm##i
-#define VMOVU vmovdqu
-#define VMOVA vmovdqa
+#if IS_IN (libc)
+# define VEC_SIZE 32
+# define VEC(i) ymm##i
+# define VMOVU vmovdqu
+# define VMOVA vmovdqa
 
-#define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
+# define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
   vmovd d, %xmm0; \
   movq r, %rax; \
   vpbroadcastb %xmm0, %ymm0
 
-#define SECTION(p) p##.avx
-#define MEMSET_SYMBOL(p,s) p##_avx2_##s
+# define SECTION(p) p##.avx
+# define MEMSET_SYMBOL(p,s) p##_avx2_##s
 
-#include "memset-vec-unaligned-erms.S"
+# include "memset-vec-unaligned-erms.S"
+#endif
2 changes: 1 addition & 1 deletion sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
@@ -1,4 +1,4 @@
-#ifdef HAVE_AVX512_ASM_SUPPORT
+#if defined HAVE_AVX512_ASM_SUPPORT && IS_IN (libc)
 # define VEC_SIZE 64
 # define VEC(i) zmm##i
 # define VMOVU vmovdqu64
18 changes: 10 additions & 8 deletions sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S
@@ -1,16 +1,18 @@
-#define VEC_SIZE 16
-#define VEC(i) xmm##i
-#define VMOVU movdqu
-#define VMOVA movdqa
+#if IS_IN (libc)
+# define VEC_SIZE 16
+# define VEC(i) xmm##i
+# define VMOVU movdqu
+# define VMOVA movdqa
 
-#define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
+# define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
   movd d, %xmm0; \
   movq r, %rax; \
   punpcklbw %xmm0, %xmm0; \
   punpcklwd %xmm0, %xmm0; \
   pshufd $0, %xmm0, %xmm0
 
-#define SECTION(p) p
-#define MEMSET_SYMBOL(p,s) p##_sse2_##s
+# define SECTION(p) p
+# define MEMSET_SYMBOL(p,s) p##_sse2_##s
 
-#include "memset-vec-unaligned-erms.S"
+# include "memset-vec-unaligned-erms.S"
+#endif
