author    Martyn Capewell <martyn.capewell@arm.com>  2022-03-01 11:22:27 +0000
committer mmc28a <78873583+mmc28a@users.noreply.github.com>  2022-03-09 11:31:48 +0000
commit    1e97cefcebc1ed82de1f1c279805ef8478758dce (patch)
tree      3cc6e8252f54524df00264106869a83916a1da5c /src/aarch64/macro-assembler-aarch64.cc
parent    5ed297b58c59143fd52e7224399155f149c800b4 (diff)
Add explicit IsImmediatePre/PostIndex tests
The MemOperand methods IsPreIndex() and IsPostIndex() checked only the addressing mode, leaving the source of the index (immediate or register) ambiguous. Add new versions that explicitly check for an immediate, and update the code to use them.
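For illustration, here is a minimal, self-contained sketch of the distinction the new predicates draw. SimpleMemOperand and its members are hypothetical stand-ins, not VIXL's actual MemOperand; the point is only that pre/post-index addressing can be driven either by an immediate or by a register, and the new Immediate* variants rule out the register case.

#include <cassert>
#include <cstdint>

// Hypothetical stand-in for MemOperand, for illustration only.
enum class AddrMode { Offset, PreIndex, PostIndex };

struct SimpleMemOperand {
  AddrMode addrmode;
  int64_t offset;            // immediate offset, if any
  bool has_register_offset;  // true when a register supplies the index

  // The original predicates look only at the addressing mode.
  bool IsPreIndex() const { return addrmode == AddrMode::PreIndex; }
  bool IsPostIndex() const { return addrmode == AddrMode::PostIndex; }

  // The explicit variants additionally require that the index is an
  // immediate, i.e. that no register supplies the offset.
  bool IsImmediatePreIndex() const {
    return IsPreIndex() && !has_register_offset;
  }
  bool IsImmediatePostIndex() const {
    return IsPostIndex() && !has_register_offset;
  }
};

int main() {
  SimpleMemOperand pre_imm{AddrMode::PreIndex, 16, false};
  SimpleMemOperand post_reg{AddrMode::PostIndex, 0, true};
  assert(pre_imm.IsPreIndex() && pre_imm.IsImmediatePreIndex());
  assert(post_reg.IsPostIndex() && !post_reg.IsImmediatePostIndex());
  return 0;
}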
Diffstat (limited to 'src/aarch64/macro-assembler-aarch64.cc')
-rw-r--r--  src/aarch64/macro-assembler-aarch64.cc | 11
1 file changed, 7 insertions(+), 4 deletions(-)
diff --git a/src/aarch64/macro-assembler-aarch64.cc b/src/aarch64/macro-assembler-aarch64.cc
index 28b47aea..e18f846a 100644
--- a/src/aarch64/macro-assembler-aarch64.cc
+++ b/src/aarch64/macro-assembler-aarch64.cc
@@ -1912,6 +1912,9 @@ LS_MACRO_LIST(DEFINE_FUNCTION)
void MacroAssembler::LoadStoreMacro(const CPURegister& rt,
const MemOperand& addr,
LoadStoreOp op) {
+ VIXL_ASSERT(addr.IsImmediateOffset() || addr.IsImmediatePostIndex() ||
+ addr.IsImmediatePreIndex() || addr.IsRegisterOffset());
+
// Worst case is ldr/str pre/post index:
// * 1 instruction for ldr/str
// * up to 4 instructions to materialise the constant
@@ -1932,11 +1935,11 @@ void MacroAssembler::LoadStoreMacro(const CPURegister& rt,
Register temp = temps.AcquireSameSizeAs(addr.GetBaseRegister());
Mov(temp, addr.GetOffset());
LoadStore(rt, MemOperand(addr.GetBaseRegister(), temp), op);
- } else if (addr.IsPostIndex() && !IsImmLSUnscaled(offset)) {
+ } else if (addr.IsImmediatePostIndex() && !IsImmLSUnscaled(offset)) {
// Post-index beyond unscaled addressing range.
LoadStore(rt, MemOperand(addr.GetBaseRegister()), op);
Add(addr.GetBaseRegister(), addr.GetBaseRegister(), Operand(offset));
- } else if (addr.IsPreIndex() && !IsImmLSUnscaled(offset)) {
+ } else if (addr.IsImmediatePreIndex() && !IsImmLSUnscaled(offset)) {
// Pre-index beyond unscaled addressing range.
Add(addr.GetBaseRegister(), addr.GetBaseRegister(), Operand(offset));
LoadStore(rt, MemOperand(addr.GetBaseRegister()), op);
@@ -1984,11 +1987,11 @@ void MacroAssembler::LoadStorePairMacro(const CPURegister& rt,
Register temp = temps.AcquireSameSizeAs(base);
Add(temp, base, offset);
LoadStorePair(rt, rt2, MemOperand(temp), op);
- } else if (addr.IsPostIndex()) {
+ } else if (addr.IsImmediatePostIndex()) {
LoadStorePair(rt, rt2, MemOperand(base), op);
Add(base, base, offset);
} else {
- VIXL_ASSERT(addr.IsPreIndex());
+ VIXL_ASSERT(addr.IsImmediatePreIndex());
Add(base, base, offset);
LoadStorePair(rt, rt2, MemOperand(base), op);
}
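As a usage sketch of the pre-index path guarded above, assuming the usual VIXL header and the vixl::aarch64 namespace; the expansion in the comments is an approximation of what LoadStoreMacro emits, not captured output.

#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

void PreIndexBeyondUnscaledRange(MacroAssembler* masm) {
  // 0x1234 is outside the signed 9-bit unscaled range, so LoadStoreMacro
  // takes the IsImmediatePreIndex() branch and splits the access into an
  // address update followed by a plain store, roughly:
  //   add x1, x1, #0x1234
  //   str x0, [x1]
  masm->Str(x0, MemOperand(x1, 0x1234, PreIndex));
}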