author    rhyskidd <rhyskidd@a5019735-40e9-0310-863c-91ae7b9d1cf9>  2015-08-15 12:21:42 +0000
committer rhyskidd <rhyskidd@a5019735-40e9-0310-863c-91ae7b9d1cf9>  2015-08-15 12:21:42 +0000
commit    7232d21715667faf36d067c75606abecbb85f17f (patch)
tree      126c661216b1f4293ce06e3c4b1d28ff2c402a1d /none
parent    917ca31cf6b0d1f92c7168f5557b1dba8fd25253 (diff)
download  valgrind-7232d21715667faf36d067c75606abecbb85f17f.tar.gz
Increase test coverage on OS X by re-enabling the none/tests/amd64/avx2-1 regression test.

n-i-bz (Unfortunately I don't have the hardware support here, but the build environment works.)

$ perl tests/vg_regtest none/tests/amd64/avx2-1
avx2-1: (skipping, prereq failed: test -x avx2-1 && ../../../tests/x86_amd64_features amd64-avx)
== 0 tests, 0 stderr failures, 0 stdout failures, 0 stderrB failures, 0 stdoutB failures, 0 post failures ==

On OS X 10.10

Before:
== 594 tests, 215 stderr failures, 9 stdout failures, 0 stderrB failures, 0 stdoutB failures, 30 post failures ==

After:
== 594 tests, 215 stderr failures, 9 stdout failures, 0 stderrB failures, 0 stdoutB failures, 30 post failures ==

git-svn-id: svn://svn.valgrind.org/valgrind/trunk@15553 a5019735-40e9-0310-863c-91ae7b9d1cf9
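Why the symbol rename in the test: Mach-O (OS X) object files prepend an underscore to C-level symbol names, while ELF does not, so inline assembly that references a C global by name must use the assembler-level spelling. The patch keeps a single spelling, _randArray, in the assembly and varies the C declaration per OS. A minimal standalone sketch of the same trick (hypothetical "buf" array, assuming GCC/Clang, and using the standard __APPLE__ macro where the patch uses Valgrind's VGO_darwin):

/* The asm below always spells the symbol "_buf"; the C declaration is
   chosen so that the assembler-level name matches on both object formats. */
#if defined(__APPLE__)
unsigned char buf[32] __attribute__((used));   /* Mach-O prepends "_" -> _buf */
#else
unsigned char _buf[32] __attribute__((used));  /* ELF adds no prefix  -> _buf */
#endif

unsigned long addr_of_buf ( void )
{
   unsigned long p;
   __asm__("leaq _buf(%%rip), %0" : "=r"(p));  /* one spelling for both OSes */
   return p;
}

Either object format then resolves the assembler-level reference _buf against the same array.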
Diffstat (limited to 'none')
-rw-r--r--  none/tests/amd64/Makefile.am | 10
-rw-r--r--  none/tests/amd64/avx2-1.c    | 56
2 files changed, 37 insertions(+), 29 deletions(-)
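For a quick sanity check of which prefix a given toolchain applies, GCC and Clang predefine __USER_LABEL_PREFIX__ ("_" on Mach-O targets, empty on ELF). A small illustrative program, not part of the patch:

#include <stdio.h>

/* Stringify the predefined __USER_LABEL_PREFIX__ macro; this prints
   "_" on Mach-O targets and "" on ELF targets. */
#define STR_(x)  #x
#define STR(x)   STR_(x)

int main ( void )
{
   printf("user label prefix: \"%s\"\n", STR(__USER_LABEL_PREFIX__));
   return 0;
}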
diff --git a/none/tests/amd64/Makefile.am b/none/tests/amd64/Makefile.am
index 7b2594457..be4113b6c 100644
--- a/none/tests/amd64/Makefile.am
+++ b/none/tests/amd64/Makefile.am
@@ -110,6 +110,11 @@ if BUILD_VPCLMULQDQ_TESTS
check_PROGRAMS += avx-1
endif
endif
+if BUILD_AVX2_TESTS
+if !COMPILER_IS_ICC
+ check_PROGRAMS += avx2-1
+endif
+endif
if BUILD_SSSE3_TESTS
check_PROGRAMS += ssse3_misaligned
endif
@@ -152,11 +157,6 @@ if ! VGCONF_OS_IS_DARWIN
if BUILD_LOOPNEL_TESTS
check_PROGRAMS += loopnel
endif
-if BUILD_AVX2_TESTS
-if !COMPILER_IS_ICC
- check_PROGRAMS += avx2-1
-endif
-endif
endif
AM_CFLAGS += @FLAG_M64@
diff --git a/none/tests/amd64/avx2-1.c b/none/tests/amd64/avx2-1.c
index b04d05b58..aa4e3679d 100644
--- a/none/tests/amd64/avx2-1.c
+++ b/none/tests/amd64/avx2-1.c
@@ -9,7 +9,11 @@ typedef unsigned int UInt;
typedef unsigned long int UWord;
typedef unsigned long long int ULong;
+#if defined(VGO_darwin)
UChar randArray[1027] __attribute__((used));
+#else
+UChar _randArray[1027] __attribute__((used));
+#endif
#define IS_32_ALIGNED(_ptr) (0 == (0x1F & (UWord)(_ptr)))
@@ -975,7 +979,7 @@ GEN_test_Ronly(VGATHERDPS_128,
"vpslld $25, %%xmm7, %%xmm8;"
"vpsrld $25, %%xmm8, %%xmm8;"
"vblendvps %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherdps %%xmm6, 3(%%r14,%%xmm8,4), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -983,7 +987,7 @@ GEN_test_Ronly(VGATHERDPS_256,
"vpslld $25, %%ymm7, %%ymm8;"
"vpsrld $25, %%ymm8, %%ymm8;"
"vblendvps %%ymm6, %%ymm8, %%ymm7, %%ymm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherdps %%ymm6, 3(%%r14,%%ymm8,4), %%ymm9;"
"xorl %%r14d, %%r14d")
@@ -993,7 +997,7 @@ GEN_test_Ronly(VGATHERQPS_128_1,
"vpmovsxdq %%xmm6, %%xmm9;"
"vblendvpd %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherqps %%xmm6, 3(%%r14,%%xmm8,4), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1003,7 +1007,7 @@ GEN_test_Ronly(VGATHERQPS_256_1,
"vpmovsxdq %%xmm6, %%ymm9;"
"vblendvpd %%ymm9, %%ymm8, %%ymm7, %%ymm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherqps %%xmm6, 3(%%r14,%%ymm8,4), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1013,7 +1017,7 @@ GEN_test_Ronly(VGATHERQPS_128_2,
"vpmovsxdq %%xmm6, %%xmm9;"
"vblendvpd %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%xmm8, %%xmm8;"
"vpbroadcastq %%xmm7, %%xmm7;"
@@ -1029,7 +1033,7 @@ GEN_test_Ronly(VGATHERQPS_256_2,
"vpmovsxdq %%xmm6, %%ymm9;"
"vblendvpd %%ymm9, %%ymm8, %%ymm7, %%ymm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%ymm8, %%ymm8;"
"vpbroadcastq %%xmm7, %%ymm7;"
@@ -1045,7 +1049,7 @@ GEN_test_Ronly(VGATHERDPD_128,
"vshufps $13, %%xmm6, %%xmm6, %%xmm9;"
"vblendvps %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherdpd %%xmm6, 3(%%r14,%%xmm8,8), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1056,7 +1060,7 @@ GEN_test_Ronly(VGATHERDPD_256,
"vshufps $221, %%ymm9, %%ymm6, %%ymm9;"
"vblendvps %%ymm9, %%ymm8, %%ymm7, %%ymm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherdpd %%ymm6, 3(%%r14,%%xmm8,8), %%ymm9;"
"xorl %%r14d, %%r14d")
@@ -1064,7 +1068,7 @@ GEN_test_Ronly(VGATHERQPD_128_1,
"vpsllq $58, %%xmm7, %%xmm8;"
"vpsrlq $58, %%xmm8, %%xmm8;"
"vblendvpd %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherqpd %%xmm6, 3(%%r14,%%xmm8,8), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1072,7 +1076,7 @@ GEN_test_Ronly(VGATHERQPD_256_1,
"vpsllq $58, %%ymm7, %%ymm8;"
"vpsrlq $58, %%ymm8, %%ymm8;"
"vblendvpd %%ymm6, %%ymm8, %%ymm7, %%ymm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vgatherqpd %%ymm6, 3(%%r14,%%ymm8,8), %%ymm9;"
"xorl %%r14d, %%r14d")
@@ -1080,7 +1084,7 @@ GEN_test_Ronly(VGATHERQPD_128_2,
"vpsllq $58, %%xmm7, %%xmm8;"
"vpsrlq $58, %%xmm8, %%xmm8;"
"vblendvpd %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%xmm8, %%xmm8;"
"vpbroadcastq %%xmm7, %%xmm7;"
@@ -1094,7 +1098,7 @@ GEN_test_Ronly(VGATHERQPD_256_2,
"vpsllq $58, %%ymm7, %%ymm8;"
"vpsrlq $58, %%ymm8, %%ymm8;"
"vblendvpd %%ymm6, %%ymm8, %%ymm7, %%ymm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%ymm8, %%ymm8;"
"vpbroadcastq %%xmm7, %%ymm7;"
@@ -1108,7 +1112,7 @@ GEN_test_Ronly(VPGATHERDD_128,
"vpslld $25, %%xmm7, %%xmm8;"
"vpsrld $25, %%xmm8, %%xmm8;"
"vblendvps %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherdd %%xmm6, 3(%%r14,%%xmm8,4), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1116,7 +1120,7 @@ GEN_test_Ronly(VPGATHERDD_256,
"vpslld $25, %%ymm7, %%ymm8;"
"vpsrld $25, %%ymm8, %%ymm8;"
"vblendvps %%ymm6, %%ymm8, %%ymm7, %%ymm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherdd %%ymm6, 3(%%r14,%%ymm8,4), %%ymm9;"
"xorl %%r14d, %%r14d")
@@ -1126,7 +1130,7 @@ GEN_test_Ronly(VPGATHERQD_128_1,
"vpmovsxdq %%xmm6, %%xmm9;"
"vblendvpd %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherqd %%xmm6, 3(%%r14,%%xmm8,4), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1136,7 +1140,7 @@ GEN_test_Ronly(VPGATHERQD_256_1,
"vpmovsxdq %%xmm6, %%ymm9;"
"vblendvpd %%ymm9, %%ymm8, %%ymm7, %%ymm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherqd %%xmm6, 3(%%r14,%%ymm8,4), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1146,7 +1150,7 @@ GEN_test_Ronly(VPGATHERQD_128_2,
"vpmovsxdq %%xmm6, %%xmm9;"
"vblendvpd %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%xmm8, %%xmm8;"
"vpbroadcastq %%xmm7, %%xmm7;"
@@ -1162,7 +1166,7 @@ GEN_test_Ronly(VPGATHERQD_256_2,
"vpmovsxdq %%xmm6, %%ymm9;"
"vblendvpd %%ymm9, %%ymm8, %%ymm7, %%ymm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%ymm8, %%ymm8;"
"vpbroadcastq %%xmm7, %%ymm7;"
@@ -1178,7 +1182,7 @@ GEN_test_Ronly(VPGATHERDQ_128,
"vshufps $13, %%xmm6, %%xmm6, %%xmm9;"
"vblendvps %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherdq %%xmm6, 3(%%r14,%%xmm8,8), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1189,7 +1193,7 @@ GEN_test_Ronly(VPGATHERDQ_256,
"vshufps $221, %%ymm9, %%ymm6, %%ymm9;"
"vblendvps %%ymm9, %%ymm8, %%ymm7, %%ymm8;"
"vmovdqa 96(%0), %%ymm9;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherdq %%ymm6, 3(%%r14,%%xmm8,8), %%ymm9;"
"xorl %%r14d, %%r14d")
@@ -1197,7 +1201,7 @@ GEN_test_Ronly(VPGATHERQQ_128_1,
"vpsllq $58, %%xmm7, %%xmm8;"
"vpsrlq $58, %%xmm8, %%xmm8;"
"vblendvpd %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherqq %%xmm6, 3(%%r14,%%xmm8,8), %%xmm9;"
"xorl %%r14d, %%r14d")
@@ -1205,7 +1209,7 @@ GEN_test_Ronly(VPGATHERQQ_256_1,
"vpsllq $58, %%ymm7, %%ymm8;"
"vpsrlq $58, %%ymm8, %%ymm8;"
"vblendvpd %%ymm6, %%ymm8, %%ymm7, %%ymm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vpgatherqq %%ymm6, 3(%%r14,%%ymm8,8), %%ymm9;"
"xorl %%r14d, %%r14d")
@@ -1213,7 +1217,7 @@ GEN_test_Ronly(VPGATHERQQ_128_2,
"vpsllq $58, %%xmm7, %%xmm8;"
"vpsrlq $58, %%xmm8, %%xmm8;"
"vblendvpd %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%xmm8, %%xmm8;"
"vpbroadcastq %%xmm7, %%xmm7;"
@@ -1227,7 +1231,7 @@ GEN_test_Ronly(VPGATHERQQ_256_2,
"vpsllq $58, %%ymm7, %%ymm8;"
"vpsrlq $58, %%ymm8, %%ymm8;"
"vblendvpd %%ymm6, %%ymm8, %%ymm7, %%ymm8;"
- "leaq randArray(%%rip), %%r14;"
+ "leaq _randArray(%%rip), %%r14;"
"vmovq %%r14, %%xmm7;"
"vpsllq $2, %%ymm8, %%ymm8;"
"vpbroadcastq %%xmm7, %%ymm7;"
@@ -1466,7 +1470,11 @@ int main ( void )
DO_D( VPMASKMOVD_256_StoreForm );
DO_D( VPMASKMOVQ_128_StoreForm );
DO_D( VPMASKMOVQ_256_StoreForm );
+#if defined(VGO_darwin)
{ int i; for (i = 0; i < sizeof(randArray); i++) randArray[i] = randUChar(); }
+#else
+ { int i; for (i = 0; i < sizeof(_randArray); i++) _randArray[i] = randUChar(); }
+#endif
DO_D( VGATHERDPS_128 );
DO_D( VGATHERDPS_256 );
DO_D( VGATHERQPS_128_1 );
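An aside on an alternative the patch does not use: GCC and Clang can also pin the assembler-level name of a declaration with an asm label, which would avoid the per-OS #if around the array entirely. A hypothetical sketch:

/* Hypothetical alternative: force one assembler-level name on every
   object format, usable verbatim from both C and inline asm. */
unsigned char randArrayAlt[1027] __asm__("randArrayAlt") __attribute__((used));
/* ...the gather tests could then say: "leaq randArrayAlt(%%rip), %%r14;" */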