about summary refs log tree commit diff
diff options
context:
space:
mode:
authorMartyn Capewell <martyn.capewell@arm.com>2022-03-01 11:22:27 +0000
committermmc28a <78873583+mmc28a@users.noreply.github.com>2022-03-09 11:31:48 +0000
commit1e97cefcebc1ed82de1f1c279805ef8478758dce (patch)
tree3cc6e8252f54524df00264106869a83916a1da5c
parent5ed297b58c59143fd52e7224399155f149c800b4 (diff)
downloadvixl-1e97cefcebc1ed82de1f1c279805ef8478758dce.tar.gz
Add explicit IsImmediatePre/PostIndex tests
The MemOperand methods IsPreIndex() and IsPostIndex() checked only the addressing mode, leaving the source of the index (immediate or register) ambiguous. Add new versions that explicitly check for an immediate, and update the code to use them.
-rw-r--r--src/aarch64/assembler-aarch64.cc12
-rw-r--r--src/aarch64/disasm-aarch64.cc2
-rw-r--r--src/aarch64/macro-assembler-aarch64.cc11
-rw-r--r--src/aarch64/operands-aarch64.cc10
-rw-r--r--src/aarch64/operands-aarch64.h7
-rw-r--r--tools/code_coverage.log1
6 files changed, 28 insertions, 15 deletions
diff --git a/src/aarch64/assembler-aarch64.cc b/src/aarch64/assembler-aarch64.cc
index 1304ede2..e4cacd46 100644
--- a/src/aarch64/assembler-aarch64.cc
+++ b/src/aarch64/assembler-aarch64.cc
@@ -1134,10 +1134,10 @@ void Assembler::LoadStorePair(const CPURegister& rt,
if (addr.IsImmediateOffset()) {
addrmodeop = LoadStorePairOffsetFixed;
} else {
- if (addr.IsPreIndex()) {
+ if (addr.IsImmediatePreIndex()) {
addrmodeop = LoadStorePairPreIndexFixed;
} else {
- VIXL_ASSERT(addr.IsPostIndex());
+ VIXL_ASSERT(addr.IsImmediatePostIndex());
addrmodeop = LoadStorePairPostIndexFixed;
}
}
@@ -5793,11 +5793,11 @@ Instr Assembler::LoadStoreMemOperand(const MemOperand& addr,
ExtendMode(ext) | ImmShiftLS((shift_amount > 0) ? 1 : 0);
}
- if (addr.IsPreIndex() && IsImmLSUnscaled(offset)) {
+ if (addr.IsImmediatePreIndex() && IsImmLSUnscaled(offset)) {
return base | LoadStorePreIndexFixed | ImmLS(offset);
}
- if (addr.IsPostIndex() && IsImmLSUnscaled(offset)) {
+ if (addr.IsImmediatePostIndex() && IsImmLSUnscaled(offset)) {
return base | LoadStorePostIndexFixed | ImmLS(offset);
}
@@ -5819,10 +5819,10 @@ void Assembler::LoadStorePAC(const Register& xt,
const MemOperand& addr,
LoadStorePACOp op) {
VIXL_ASSERT(xt.Is64Bits());
- VIXL_ASSERT(addr.IsImmediateOffset() || addr.IsPreIndex());
+ VIXL_ASSERT(addr.IsImmediateOffset() || addr.IsImmediatePreIndex());
Instr pac_op = op;
- if (addr.IsPreIndex()) {
+ if (addr.IsImmediatePreIndex()) {
pac_op |= LoadStorePACPreBit;
}
diff --git a/src/aarch64/disasm-aarch64.cc b/src/aarch64/disasm-aarch64.cc
index 72c71e76..15165b97 100644
--- a/src/aarch64/disasm-aarch64.cc
+++ b/src/aarch64/disasm-aarch64.cc
@@ -645,7 +645,7 @@ const Disassembler::FormToVisitorFnMap *Disassembler::GetFormToVisitorFnMap() {
{"usdot_asimdsame2_d", &Disassembler::VisitNEON3SameExtra},
};
return &form_to_visitor;
-}
+} // NOLINT(readability/fn_size)
Disassembler::Disassembler() {
buffer_size_ = 256;
diff --git a/src/aarch64/macro-assembler-aarch64.cc b/src/aarch64/macro-assembler-aarch64.cc
index 28b47aea..e18f846a 100644
--- a/src/aarch64/macro-assembler-aarch64.cc
+++ b/src/aarch64/macro-assembler-aarch64.cc
@@ -1912,6 +1912,9 @@ LS_MACRO_LIST(DEFINE_FUNCTION)
void MacroAssembler::LoadStoreMacro(const CPURegister& rt,
const MemOperand& addr,
LoadStoreOp op) {
+ VIXL_ASSERT(addr.IsImmediateOffset() || addr.IsImmediatePostIndex() ||
+ addr.IsImmediatePreIndex() || addr.IsRegisterOffset());
+
// Worst case is ldr/str pre/post index:
// * 1 instruction for ldr/str
// * up to 4 instructions to materialise the constant
@@ -1932,11 +1935,11 @@ void MacroAssembler::LoadStoreMacro(const CPURegister& rt,
Register temp = temps.AcquireSameSizeAs(addr.GetBaseRegister());
Mov(temp, addr.GetOffset());
LoadStore(rt, MemOperand(addr.GetBaseRegister(), temp), op);
- } else if (addr.IsPostIndex() && !IsImmLSUnscaled(offset)) {
+ } else if (addr.IsImmediatePostIndex() && !IsImmLSUnscaled(offset)) {
// Post-index beyond unscaled addressing range.
LoadStore(rt, MemOperand(addr.GetBaseRegister()), op);
Add(addr.GetBaseRegister(), addr.GetBaseRegister(), Operand(offset));
- } else if (addr.IsPreIndex() && !IsImmLSUnscaled(offset)) {
+ } else if (addr.IsImmediatePreIndex() && !IsImmLSUnscaled(offset)) {
// Pre-index beyond unscaled addressing range.
Add(addr.GetBaseRegister(), addr.GetBaseRegister(), Operand(offset));
LoadStore(rt, MemOperand(addr.GetBaseRegister()), op);
@@ -1984,11 +1987,11 @@ void MacroAssembler::LoadStorePairMacro(const CPURegister& rt,
Register temp = temps.AcquireSameSizeAs(base);
Add(temp, base, offset);
LoadStorePair(rt, rt2, MemOperand(temp), op);
- } else if (addr.IsPostIndex()) {
+ } else if (addr.IsImmediatePostIndex()) {
LoadStorePair(rt, rt2, MemOperand(base), op);
Add(base, base, offset);
} else {
- VIXL_ASSERT(addr.IsPreIndex());
+ VIXL_ASSERT(addr.IsImmediatePreIndex());
Add(base, base, offset);
LoadStorePair(rt, rt2, MemOperand(base), op);
}
diff --git a/src/aarch64/operands-aarch64.cc b/src/aarch64/operands-aarch64.cc
index fa0162d9..8db129c9 100644
--- a/src/aarch64/operands-aarch64.cc
+++ b/src/aarch64/operands-aarch64.cc
@@ -360,12 +360,16 @@ bool MemOperand::IsRegisterOffset() const {
return (addrmode_ == Offset) && !regoffset_.Is(NoReg);
}
-
bool MemOperand::IsPreIndex() const { return addrmode_ == PreIndex; }
-
-
bool MemOperand::IsPostIndex() const { return addrmode_ == PostIndex; }
+bool MemOperand::IsImmediatePreIndex() const {
+ return IsPreIndex() && regoffset_.Is(NoReg);
+}
+
+bool MemOperand::IsImmediatePostIndex() const {
+ return IsPostIndex() && regoffset_.Is(NoReg);
+}
void MemOperand::AddOffset(int64_t offset) {
VIXL_ASSERT(IsImmediateOffset());
diff --git a/src/aarch64/operands-aarch64.h b/src/aarch64/operands-aarch64.h
index b442cfd6..08ee4a61 100644
--- a/src/aarch64/operands-aarch64.h
+++ b/src/aarch64/operands-aarch64.h
@@ -434,9 +434,14 @@ class MemOperand {
bool IsImmediateOffset() const;
// True for register-offset (but not indexed) MemOperands.
bool IsRegisterOffset() const;
-
+ // True for immediate or register pre-indexed MemOperands.
bool IsPreIndex() const;
+ // True for immediate or register post-indexed MemOperands.
bool IsPostIndex() const;
+ // True for immediate pre-indexed MemOperands, [reg, #imm]!
+ bool IsImmediatePreIndex() const;
+ // True for immediate post-indexed MemOperands, [reg], #imm
+ bool IsImmediatePostIndex() const;
void AddOffset(int64_t offset);
diff --git a/tools/code_coverage.log b/tools/code_coverage.log
index fc2d2596..34f54494 100644
--- a/tools/code_coverage.log
+++ b/tools/code_coverage.log
@@ -4,3 +4,4 @@
1636647628 82.97% 97.54% 95.28%
1639684221 82.92% 97.51% 94.06%
1642688881 82.94% 97.51% 95.27%
+1646150629 82.94% 97.51% 95.36%