author    Martyn Capewell <martyn.capewell@arm.com>    2020-06-05 18:20:11 +0100
committer TatWai Chong <tatwai.chong@arm.com>          2020-06-22 12:51:34 -0700
commit    a5112344aa6a2c562379ec67398a6719360965bf (patch)
tree      4ea88d7d4a0701721cb5702f510d04af85fe8f66 /src/aarch64/simulator-aarch64.cc
parent    cd3f6c5ec96ff6d8240a07e7084ae5de700dc9c7 (diff)
download  vixl-a5112344aa6a2c562379ec67398a6719360965bf.tar.gz
[sve] Complete remaining gather loads.
Implement the remaining 64-bit gather loads, including the unpacked, unscaled, and scaled offset forms.

Change-Id: I208de1fabfe40f7095f9848c3ebf9de82a5f7416
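
For context, all of the encodings touched below share one addressing model: a 64-bit gather load computes an address for each active lane as the scalar base plus a per-lane vector offset, where the offset is either a full 64-bit element or its low 32 bits zero- or sign-extended (the "unpacked" forms), optionally shifted left by the access size (the "scaled" forms). The following is a minimal C++ sketch of that behaviour, assuming plain host-memory reads in place of the simulator's real memory model; the helper names are illustrative, not VIXL's.

#include <cstdint>
#include <cstring>
#include <vector>

// Zero- or sign-extend the low 32 bits of an "unpacked" offset element;
// packed (full 64-bit) offsets are used as-is.
static uint64_t ExtendOffset(uint64_t off, bool unpacked, bool is_signed) {
  if (!unpacked) return off;
  uint32_t low = static_cast<uint32_t>(off);
  return is_signed
             ? static_cast<uint64_t>(static_cast<int64_t>(static_cast<int32_t>(low)))
             : static_cast<uint64_t>(low);
}

// Per-lane address generation and load for a 64-bit gather:
//   addr[lane] = base + (extend(offsets[lane]) << scale)
// where scale is log2(msize_bytes) for scaled forms and 0 otherwise.
static std::vector<uint64_t> GatherLoad64(uint64_t base,
                                          const std::vector<uint64_t>& offsets,  // Zm.D
                                          const std::vector<bool>& pg,  // governing predicate
                                          unsigned msize_bytes,  // 1, 2, 4 or 8
                                          bool unpacked,
                                          bool is_signed,
                                          bool scaled) {
  unsigned shift = 0;
  while (scaled && ((1u << (shift + 1)) <= msize_bytes)) shift++;
  std::vector<uint64_t> result(offsets.size(), 0);  // inactive lanes read as zero
  for (size_t lane = 0; lane < offsets.size(); lane++) {
    if (!pg[lane]) continue;
    uint64_t addr =
        base + (ExtendOffset(offsets[lane], unpacked, is_signed) << shift);
    uint64_t value = 0;
    // A real simulator routes this through its guest-memory model; this
    // sketch simply reads host memory at the computed address.
    memcpy(&value, reinterpret_cast<const void*>(addr), msize_bytes);
    result[lane] = value;  // the LD1S* forms would also sign-extend here
  }
  return result;
}
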
Diffstat (limited to 'src/aarch64/simulator-aarch64.cc')
-rw-r--r--  src/aarch64/simulator-aarch64.cc  |  38
1 file changed, 0 insertions(+), 38 deletions(-)
diff --git a/src/aarch64/simulator-aarch64.cc b/src/aarch64/simulator-aarch64.cc
index d0fa2b90..1a05462d 100644
--- a/src/aarch64/simulator-aarch64.cc
+++ b/src/aarch64/simulator-aarch64.cc
@@ -9779,24 +9779,13 @@ void Simulator::VisitSVELoadVectorRegister(const Instruction* instr) {
void Simulator::VisitSVE64BitGatherLoad_ScalarPlus32BitUnpackedScaledOffsets(
const Instruction* instr) {
- USE(instr);
switch (instr->Mask(
SVE64BitGatherLoad_ScalarPlus32BitUnpackedScaledOffsetsMask)) {
case LD1D_z_p_bz_d_x32_scaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1H_z_p_bz_d_x32_scaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1SH_z_p_bz_d_x32_scaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1SW_z_p_bz_d_x32_scaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1W_z_p_bz_d_x32_scaled:
- VIXL_UNIMPLEMENTED();
- break;
case LDFF1H_z_p_bz_d_x32_scaled:
case LDFF1W_z_p_bz_d_x32_scaled:
case LDFF1D_z_p_bz_d_x32_scaled:
@@ -9814,23 +9803,12 @@ void Simulator::VisitSVE64BitGatherLoad_ScalarPlus32BitUnpackedScaledOffsets(
void Simulator::VisitSVE64BitGatherLoad_ScalarPlus64BitScaledOffsets(
const Instruction* instr) {
- USE(instr);
switch (instr->Mask(SVE64BitGatherLoad_ScalarPlus64BitScaledOffsetsMask)) {
case LD1D_z_p_bz_d_64_scaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1H_z_p_bz_d_64_scaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1SH_z_p_bz_d_64_scaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1SW_z_p_bz_d_64_scaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1W_z_p_bz_d_64_scaled:
- VIXL_UNIMPLEMENTED();
- break;
case LDFF1H_z_p_bz_d_64_scaled:
case LDFF1W_z_p_bz_d_64_scaled:
case LDFF1D_z_p_bz_d_64_scaled:
@@ -9847,7 +9825,6 @@ void Simulator::VisitSVE64BitGatherLoad_ScalarPlus64BitScaledOffsets(
void Simulator::VisitSVE64BitGatherLoad_ScalarPlus64BitUnscaledOffsets(
const Instruction* instr) {
- USE(instr);
switch (instr->Mask(SVE64BitGatherLoad_ScalarPlus64BitUnscaledOffsetsMask)) {
case LD1B_z_p_bz_d_64_unscaled:
case LD1D_z_p_bz_d_64_unscaled:
@@ -9876,30 +9853,15 @@ void Simulator::VisitSVE64BitGatherLoad_ScalarPlus64BitUnscaledOffsets(
void Simulator::VisitSVE64BitGatherLoad_ScalarPlusUnpacked32BitUnscaledOffsets(
const Instruction* instr) {
- USE(instr);
switch (instr->Mask(
SVE64BitGatherLoad_ScalarPlusUnpacked32BitUnscaledOffsetsMask)) {
case LD1B_z_p_bz_d_x32_unscaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1D_z_p_bz_d_x32_unscaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1H_z_p_bz_d_x32_unscaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1SB_z_p_bz_d_x32_unscaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1SH_z_p_bz_d_x32_unscaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1SW_z_p_bz_d_x32_unscaled:
- VIXL_UNIMPLEMENTED();
- break;
case LD1W_z_p_bz_d_x32_unscaled:
- VIXL_UNIMPLEMENTED();
- break;
case LDFF1B_z_p_bz_d_x32_unscaled:
case LDFF1H_z_p_bz_d_x32_unscaled:
case LDFF1W_z_p_bz_d_x32_unscaled:
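
The common tails of these handlers fall outside the diff context, but the shape of the change is plain C++ case fall-through: each LD1*/LDFF1* encoding in a group now reaches the same gather-load path instead of its own VIXL_UNIMPLEMENTED() stub. Schematically, for the 64-bit scaled-offset group (the handler name is hypothetical, since the shared code is not visible in these hunks):

switch (instr->Mask(SVE64BitGatherLoad_ScalarPlus64BitScaledOffsetsMask)) {
  // With no break between cases, every recognised encoding falls
  // through to one common handler.
  case LD1D_z_p_bz_d_64_scaled:
  case LD1H_z_p_bz_d_64_scaled:
  case LD1SH_z_p_bz_d_64_scaled:
  case LD1SW_z_p_bz_d_64_scaled:
  case LD1W_z_p_bz_d_64_scaled:
  case LDFF1H_z_p_bz_d_64_scaled:
  case LDFF1W_z_p_bz_d_64_scaled:
  case LDFF1D_z_p_bz_d_64_scaled:
    SVEGatherLoadHelper(instr);  // hypothetical common handler
    break;
  default:
    VIXL_UNIMPLEMENTED();
    break;
}

The LDFF1* encodings share this dispatch but carry first-fault semantics: only a fault on the first active element is taken as usual, while faults on later elements instead clear the corresponding bits of the first-fault register (FFR).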