about summary refs log tree commit diff
path: root/test/aarch64
diff options
context:
space:
mode:
author    Martyn Capewell <martyn.capewell@arm.com> 2016-11-02 18:52:55 +0000
committer Martyn Capewell <martyn.capewell@arm.com> 2016-11-04 13:46:11 +0000
commit    5b24fb388927a1f1801a15d460d4c9448f7aa733 (patch)
tree      bc667970ecf020f20d68189a62c88e4006161660 /test/aarch64
parent    9e52d5becfa81b6b819cdc0350693c3ad6b95b1d (diff)
download  vixl-5b24fb388927a1f1801a15d460d4c9448f7aa733.tar.gz
Fix simulator-aarch64 to satisfy UBSan.
Most changes are fixing shifts of signed integers, or out-of-range shifts.

Change-Id: Ic5787ac2e99f05fd0d3118060f80b39d9e8d5621
Diffstat (limited to 'test/aarch64')
-rw-r--r-- test/aarch64/test-assembler-aarch64.cc | 50
1 file changed, 50 insertions, 0 deletions
diff --git a/test/aarch64/test-assembler-aarch64.cc b/test/aarch64/test-assembler-aarch64.cc
index 567d3241..c4594e9c 100644
--- a/test/aarch64/test-assembler-aarch64.cc
+++ b/test/aarch64/test-assembler-aarch64.cc
@@ -545,6 +545,56 @@ TEST(mov) {
}
+TEST(mov_negative) {
+ SETUP();
+
+ START();
+ __ Mov(w11, 0xffffffff);
+ __ Mov(x12, 0xffffffffffffffff);
+
+ __ Mov(w13, Operand(w11, LSL, 1));
+ __ Mov(w14, Operand(w11, LSR, 1));
+ __ Mov(w15, Operand(w11, ASR, 1));
+ __ Mov(w18, Operand(w11, ROR, 1));
+ __ Mov(w19, Operand(w11, UXTB, 1));
+ __ Mov(w20, Operand(w11, SXTB, 1));
+ __ Mov(w21, Operand(w11, UXTH, 1));
+ __ Mov(w22, Operand(w11, SXTH, 1));
+
+ __ Mov(x23, Operand(x12, LSL, 1));
+ __ Mov(x24, Operand(x12, LSR, 1));
+ __ Mov(x25, Operand(x12, ASR, 1));
+ __ Mov(x26, Operand(x12, ROR, 1));
+ __ Mov(x27, Operand(x12, UXTH, 1));
+ __ Mov(x28, Operand(x12, SXTH, 1));
+ __ Mov(x29, Operand(x12, UXTW, 1));
+ __ Mov(x30, Operand(x12, SXTW, 1));
+ END();
+
+ RUN();
+
+ ASSERT_EQUAL_64(0xfffffffe, x13);
+ ASSERT_EQUAL_64(0x7fffffff, x14);
+ ASSERT_EQUAL_64(0xffffffff, x15);
+ ASSERT_EQUAL_64(0xffffffff, x18);
+ ASSERT_EQUAL_64(0x000001fe, x19);
+ ASSERT_EQUAL_64(0xfffffffe, x20);
+ ASSERT_EQUAL_64(0x0001fffe, x21);
+ ASSERT_EQUAL_64(0xfffffffe, x22);
+
+ ASSERT_EQUAL_64(0xfffffffffffffffe, x23);
+ ASSERT_EQUAL_64(0x7fffffffffffffff, x24);
+ ASSERT_EQUAL_64(0xffffffffffffffff, x25);
+ ASSERT_EQUAL_64(0xffffffffffffffff, x26);
+ ASSERT_EQUAL_64(0x000000000001fffe, x27);
+ ASSERT_EQUAL_64(0xfffffffffffffffe, x28);
+ ASSERT_EQUAL_64(0x00000001fffffffe, x29);
+ ASSERT_EQUAL_64(0xfffffffffffffffe, x30);
+
+ TEARDOWN();
+}
+
+
TEST(orr) {
SETUP();