author     Martyn Capewell <martyn.capewell@arm.com>  2020-07-02 11:24:11 +0100
committer  Jacob Bramley <jacob.bramley@arm.com>      2020-07-06 08:38:44 +0000
commit     102e7a5edc112c3c561f7b8352619c8156d502b1 (patch)
tree       3290f6bed3b415a04d94e9cbdf13260be272ad2a /src
parent     ebc3b8f5c4aa321d04cb0c5d414e95f697e86078 (diff)
Make assembler more strict about SVE prefetch arguments
Add assertions to the assembler to prevent the use of unsupported addressing modes for prfb/h/w/d.

Change-Id: Ie12991eb2e29661eb266fc495e9164246371d10e
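The assertions mirror the architectural addressing modes of the SVE prefetch instructions. As a minimal sketch (illustrative only, not part of this patch; the registers and the pldl1keep operation are arbitrary choices), the scalar-plus-scalar forms now accepted look like this, with the offset register unshifted for prfb and shifted left by log2 of the element size in bytes for prfh/w/d:

    prfb pldl1keep, p0, [x0, x1]            // byte: no offset modifier
    prfh pldl1keep, p0, [x0, x1, lsl #1]    // halfword: LSL #1
    prfw pldl1keep, p0, [x0, x1, lsl #2]    // word: LSL #2
    prfd pldl1keep, p0, [x0, x1, lsl #3]    // doubleword: LSL #3

The scalar-plus-vector (64-bit scaled) forms follow the same pattern: the shift amount must match the element size, and prfh/w/d require an explicit lsl modifier while prfb takes none, e.g. prfh pldl1keep, p0, [x0, z1.d, lsl #1].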
Diffstat (limited to 'src')
-rw-r--r--  src/aarch64/assembler-sve-aarch64.cc | 36
1 file changed, 34 insertions(+), 2 deletions(-)
diff --git a/src/aarch64/assembler-sve-aarch64.cc b/src/aarch64/assembler-sve-aarch64.cc
index 0155a1a7..0a2033d8 100644
--- a/src/aarch64/assembler-sve-aarch64.cc
+++ b/src/aarch64/assembler-sve-aarch64.cc
@@ -4544,17 +4544,25 @@ void Assembler::SVEContiguousPrefetchScalarPlusScalarHelper(
     int prefetch_size) {
   VIXL_ASSERT(addr.IsScalarPlusScalar());
   Instr op = 0xffffffff;
+
   switch (prefetch_size) {
     case kBRegSize:
+      VIXL_ASSERT(addr.GetOffsetModifier() == NO_SVE_OFFSET_MODIFIER);
       op = PRFB_i_p_br_s;
       break;
     case kHRegSize:
+      VIXL_ASSERT(addr.GetOffsetModifier() == SVE_LSL);
+      VIXL_ASSERT(addr.GetShiftAmount() == kHRegSizeInBytesLog2);
       op = PRFH_i_p_br_s;
       break;
     case kSRegSize:
+      VIXL_ASSERT(addr.GetOffsetModifier() == SVE_LSL);
+      VIXL_ASSERT(addr.GetShiftAmount() == kSRegSizeInBytesLog2);
       op = PRFW_i_p_br_s;
       break;
     case kDRegSize:
+      VIXL_ASSERT(addr.GetOffsetModifier() == SVE_LSL);
+      VIXL_ASSERT(addr.GetShiftAmount() == kDRegSizeInBytesLog2);
       op = PRFD_i_p_br_s;
       break;
     default:
@@ -4576,22 +4584,46 @@ void Assembler::SVEContiguousPrefetchScalarPlusVectorHelper(
   ZRegister zm = addr.GetVectorOffset();
   SVEOffsetModifier mod = addr.GetOffsetModifier();
+  // All prefetch scalar-plus-vector addressing modes use a shift corresponding
+  // to the element size.
+  switch (prefetch_size) {
+    case kBRegSize:
+      VIXL_ASSERT(addr.GetShiftAmount() == kBRegSizeInBytesLog2);
+      break;
+    case kHRegSize:
+      VIXL_ASSERT(addr.GetShiftAmount() == kHRegSizeInBytesLog2);
+      break;
+    case kSRegSize:
+      VIXL_ASSERT(addr.GetShiftAmount() == kSRegSizeInBytesLog2);
+      break;
+    case kDRegSize:
+      VIXL_ASSERT(addr.GetShiftAmount() == kDRegSizeInBytesLog2);
+      break;
+    default:
+      VIXL_UNIMPLEMENTED();
+      break;
+  }
+
   Instr sx = 0;
   Instr op = 0xffffffff;
-  if (mod == NO_SVE_OFFSET_MODIFIER) {
+  if ((mod == NO_SVE_OFFSET_MODIFIER) || (mod == SVE_LSL)) {
     VIXL_ASSERT(zm.IsLaneSizeD());
     switch (prefetch_size) {
       case kBRegSize:
+        VIXL_ASSERT(mod == NO_SVE_OFFSET_MODIFIER);
        op = PRFB_i_p_bz_d_64_scaled;
        break;
      case kHRegSize:
+        VIXL_ASSERT(mod == SVE_LSL);
        op = PRFH_i_p_bz_d_64_scaled;
        break;
      case kSRegSize:
+        VIXL_ASSERT(mod == SVE_LSL);
        op = PRFW_i_p_bz_d_64_scaled;
        break;
      case kDRegSize:
+        VIXL_ASSERT(mod == SVE_LSL);
        op = PRFD_i_p_bz_d_64_scaled;
        break;
      default:
@@ -4644,7 +4676,7 @@ void Assembler::SVEPrefetchHelper(PrefetchOperation prfop,
   } else if (addr.IsScalarPlusImmediate()) {
     // For example:
-    // [x0, #42, MUL VL]
+    // [x0, #42, mul vl]
     SVEGatherPrefetchScalarPlusImmediateHelper(prfop, pg, addr, prefetch_size);
   } else if (addr.IsScalarPlusVector()) {