summaryrefslogtreecommitdiff
path: root/mali_kbase/hw_access/mali_kbase_hw_access_regmap.h
diff options
context:
space:
mode:
Diffstat (limited to 'mali_kbase/hw_access/mali_kbase_hw_access_regmap.h')
-rw-r--r-- mali_kbase/hw_access/mali_kbase_hw_access_regmap.h | 507
1 file changed, 507 insertions, 0 deletions
diff --git a/mali_kbase/hw_access/mali_kbase_hw_access_regmap.h b/mali_kbase/hw_access/mali_kbase_hw_access_regmap.h
new file mode 100644
index 0000000..9bd646d
--- /dev/null
+++ b/mali_kbase/hw_access/mali_kbase_hw_access_regmap.h
@@ -0,0 +1,507 @@
+/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
+/*
+ *
+ * (C) COPYRIGHT 2023 ARM Limited. All rights reserved.
+ *
+ * This program is free software and is provided to you under the terms of the
+ * GNU General Public License version 2 as published by the Free Software
+ * Foundation, and any use by you of this program is subject to the terms
+ * of such GNU license.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, you can access it online at
+ * http://www.gnu.org/licenses/gpl-2.0.html.
+ *
+ */
+
+#ifndef _MALI_KBASE_HW_ACCESS_REGMAP_H_
+#define _MALI_KBASE_HW_ACCESS_REGMAP_H_
+
+/* Pull in the backend-specific register enums and macros: the CSF
+ * (command-stream front-end) variant or the JM (job-manager) variant,
+ * selected at build time by MALI_USE_CSF.
+ */
+#if MALI_USE_CSF
+#include "regmap/mali_kbase_regmap_csf_enums.h"
+#include "regmap/mali_kbase_regmap_csf_macros.h"
+#else
+#include "regmap/mali_kbase_regmap_jm_enums.h"
+#include "regmap/mali_kbase_regmap_jm_macros.h"
+#endif
+
+/* GPU_U definition */
+/* Unsigned-constant helpers: in C they append the u/ul/ull suffix to the
+ * literal; under __ASSEMBLER__ they expand to the bare value because
+ * assemblers do not accept C integer-literal suffixes.
+ */
+#ifdef __ASSEMBLER__
+#define GPU_U(x) x
+#define GPU_UL(x) x
+#define GPU_ULL(x) x
+#else
+#define GPU_U(x) x##u
+#define GPU_UL(x) x##ul
+#define GPU_ULL(x) x##ull
+#endif /* __ASSEMBLER__ */
+
+/* common GPU_STATUS values */
+#define GPU_STATUS_PROTECTED_MODE_ACTIVE (1 << 7) /* Set if protected mode is active */
+#define GPU_STATUS_GPU_DBG_ENABLED (1 << 8) /* DBGEN wire status */
+
+/* Number of SYSC_ALLOC configuration registers (see SYSC_ALLOC0..3 field
+ * macros later in this file; presumably 8 registers in total — confirm
+ * against the register map).
+ */
+#define GPU_SYSC_ALLOC_COUNT 8
+#define GPU_L2_SLICE_HASH_COUNT 3
+/* GPU_ASN_HASH_COUNT is an alias to GPU_L2_SLICE_HASH_COUNT */
+#define GPU_ASN_HASH_COUNT GPU_L2_SLICE_HASH_COUNT
+
+/* Cores groups are l2 coherent */
+#define MEM_FEATURES_COHERENT_CORE_GROUP_SHIFT GPU_U(0)
+#define MEM_FEATURES_COHERENT_CORE_GROUP_MASK (GPU_U(0x1) << MEM_FEATURES_COHERENT_CORE_GROUP_SHIFT)
+
+/* NOTE: re-defined to also include POWER_CHANGED_SINGLE when
+ * CONFIG_MALI_DEBUG is set (see end of this file).
+ */
+#define GPU_IRQ_REG_ALL (GPU_IRQ_REG_COMMON)
+
+/*
+ * MMU_IRQ_RAWSTAT register values. Values are valid also for
+ * MMU_IRQ_CLEAR, MMU_IRQ_MASK, MMU_IRQ_STATUS registers.
+ */
+#define MMU_PAGE_FAULT_FLAGS 16
+
+/* Macros returning a bitmask to retrieve page fault or bus error flags from
+ * MMU registers: bits [15:0] are page-fault flags, bits [31:16] are bus-error
+ * flags, one bit per address space n.
+ */
+#define MMU_PAGE_FAULT(n) (1UL << (n))
+#define MMU_BUS_ERROR(n) (1UL << ((n) + MMU_PAGE_FAULT_FLAGS))
+
+/*
+ * Begin MMU STATUS register values
+ *
+ * Field-accessor convention used throughout this file:
+ *   *_GET(reg_val)        - mask out the field, then shift it down to bit 0.
+ *   *_SET(reg_val, value) - clear the field via ~(~(reg_val) | MASK)
+ *                           (equivalent to reg_val & ~MASK), then OR in the
+ *                           shifted, masked value.
+ */
+#define AS_STATUS_AS_ACTIVE_EXT_SHIFT GPU_U(0)
+#define AS_STATUS_AS_ACTIVE_EXT_MASK (GPU_U(0x1) << AS_STATUS_AS_ACTIVE_EXT_SHIFT)
+#define AS_STATUS_AS_ACTIVE_EXT_GET(reg_val) \
+ (((reg_val)&AS_STATUS_AS_ACTIVE_EXT_MASK) >> AS_STATUS_AS_ACTIVE_EXT_SHIFT)
+#define AS_STATUS_AS_ACTIVE_EXT_SET(reg_val, value) \
+ (~(~(reg_val) | AS_STATUS_AS_ACTIVE_EXT_MASK) | \
+ (((value) << AS_STATUS_AS_ACTIVE_EXT_SHIFT) & AS_STATUS_AS_ACTIVE_EXT_MASK))
+
+/*
+ * Begin MMU FAULTSTATUS register values
+ */
+/* EXCEPTION_TYPE: bits [7:0] — fault exception code */
+#define AS_FAULTSTATUS_EXCEPTION_TYPE_SHIFT GPU_U(0)
+#define AS_FAULTSTATUS_EXCEPTION_TYPE_MASK (GPU_U(0xFF) << AS_FAULTSTATUS_EXCEPTION_TYPE_SHIFT)
+#define AS_FAULTSTATUS_EXCEPTION_TYPE_GET(reg_val) \
+ (((reg_val)&AS_FAULTSTATUS_EXCEPTION_TYPE_MASK) >> AS_FAULTSTATUS_EXCEPTION_TYPE_SHIFT)
+#define AS_FAULTSTATUS_EXCEPTION_TYPE_SET(reg_val, value) \
+ (~(~(reg_val) | AS_FAULTSTATUS_EXCEPTION_TYPE_MASK) | \
+ (((value) << AS_FAULTSTATUS_EXCEPTION_TYPE_SHIFT) & AS_FAULTSTATUS_EXCEPTION_TYPE_MASK))
+
+/* ACCESS_TYPE: bits [9:8] — type of access that faulted (values below) */
+#define AS_FAULTSTATUS_ACCESS_TYPE_SHIFT GPU_U(8)
+#define AS_FAULTSTATUS_ACCESS_TYPE_MASK (GPU_U(0x3) << AS_FAULTSTATUS_ACCESS_TYPE_SHIFT)
+#define AS_FAULTSTATUS_ACCESS_TYPE_GET(reg_val) \
+ (((reg_val)&AS_FAULTSTATUS_ACCESS_TYPE_MASK) >> AS_FAULTSTATUS_ACCESS_TYPE_SHIFT)
+#define AS_FAULTSTATUS_ACCESS_TYPE_SET(reg_val, value) \
+ (~(~(reg_val) | AS_FAULTSTATUS_ACCESS_TYPE_MASK) | \
+ (((value) << AS_FAULTSTATUS_ACCESS_TYPE_SHIFT) & AS_FAULTSTATUS_ACCESS_TYPE_MASK))
+
+#define AS_FAULTSTATUS_ACCESS_TYPE_ATOMIC 0x0
+#define AS_FAULTSTATUS_ACCESS_TYPE_EXECUTE 0x1
+#define AS_FAULTSTATUS_ACCESS_TYPE_READ 0x2
+#define AS_FAULTSTATUS_ACCESS_TYPE_WRITE 0x3
+
+/* SOURCE_ID: bits [31:16] — identifier of the faulting requester */
+#define AS_FAULTSTATUS_SOURCE_ID_SHIFT GPU_U(16)
+#define AS_FAULTSTATUS_SOURCE_ID_MASK (GPU_U(0xFFFF) << AS_FAULTSTATUS_SOURCE_ID_SHIFT)
+#define AS_FAULTSTATUS_SOURCE_ID_GET(reg_val) \
+ (((reg_val)&AS_FAULTSTATUS_SOURCE_ID_MASK) >> AS_FAULTSTATUS_SOURCE_ID_SHIFT)
+#define AS_FAULTSTATUS_SOURCE_ID_SET(reg_val, value) \
+ (~(~(reg_val) | AS_FAULTSTATUS_SOURCE_ID_MASK) | \
+ (((value) << AS_FAULTSTATUS_SOURCE_ID_SHIFT) & AS_FAULTSTATUS_SOURCE_ID_MASK))
+
+/*
+ * Begin MMU TRANSCFG register values
+ */
+/* MODE: bits [3:0] — address-translation mode (values below) */
+#define AS_TRANSCFG_MODE_SHIFT GPU_U(0)
+#define AS_TRANSCFG_MODE_MASK (GPU_U(0xF) << AS_TRANSCFG_MODE_SHIFT)
+#define AS_TRANSCFG_MODE_GET(reg_val) (((reg_val)&AS_TRANSCFG_MODE_MASK) >> AS_TRANSCFG_MODE_SHIFT)
+#define AS_TRANSCFG_MODE_SET(reg_val, value) \
+ (~(~(reg_val) | AS_TRANSCFG_MODE_MASK) | \
+ (((value) << AS_TRANSCFG_MODE_SHIFT) & AS_TRANSCFG_MODE_MASK))
+
+#define AS_TRANSCFG_MODE_UNMAPPED 0x1
+#define AS_TRANSCFG_MODE_IDENTITY 0x2
+#define AS_TRANSCFG_MODE_AARCH64_4K 0x6
+#define AS_TRANSCFG_MODE_AARCH64_64K 0x8
+
+/* PTW_MEMATTR: bits [25:24] — memory attributes for page-table walks */
+#define AS_TRANSCFG_PTW_MEMATTR_SHIFT GPU_U(24)
+#define AS_TRANSCFG_PTW_MEMATTR_MASK (GPU_U(0x3) << AS_TRANSCFG_PTW_MEMATTR_SHIFT)
+#define AS_TRANSCFG_PTW_MEMATTR_GET(reg_val) \
+ (((reg_val)&AS_TRANSCFG_PTW_MEMATTR_MASK) >> AS_TRANSCFG_PTW_MEMATTR_SHIFT)
+#define AS_TRANSCFG_PTW_MEMATTR_SET(reg_val, value) \
+ (~(~(reg_val) | AS_TRANSCFG_PTW_MEMATTR_MASK) | \
+ (((value) << AS_TRANSCFG_PTW_MEMATTR_SHIFT) & AS_TRANSCFG_PTW_MEMATTR_MASK))
+
+#define AS_TRANSCFG_PTW_MEMATTR_INVALID 0x0
+#define AS_TRANSCFG_PTW_MEMATTR_NON_CACHEABLE 0x1
+#define AS_TRANSCFG_PTW_MEMATTR_WRITE_BACK 0x2
+
+/* PTW_SH: bits [29:28] — shareability for page-table walks */
+#define AS_TRANSCFG_PTW_SH_SHIFT GPU_U(28)
+#define AS_TRANSCFG_PTW_SH_MASK (GPU_U(0x3) << AS_TRANSCFG_PTW_SH_SHIFT)
+#define AS_TRANSCFG_PTW_SH_GET(reg_val) \
+ (((reg_val)&AS_TRANSCFG_PTW_SH_MASK) >> AS_TRANSCFG_PTW_SH_SHIFT)
+#define AS_TRANSCFG_PTW_SH_SET(reg_val, value) \
+ (~(~(reg_val) | AS_TRANSCFG_PTW_SH_MASK) | \
+ (((value) << AS_TRANSCFG_PTW_SH_SHIFT) & AS_TRANSCFG_PTW_SH_MASK))
+
+#define AS_TRANSCFG_PTW_SH_NON_SHAREABLE 0x0
+#define AS_TRANSCFG_PTW_SH_OUTER_SHAREABLE 0x2
+#define AS_TRANSCFG_PTW_SH_INNER_SHAREABLE 0x3
+
+/* R_ALLOCATE: bit 30 — read-allocate behaviour (values below) */
+#define AS_TRANSCFG_R_ALLOCATE_SHIFT GPU_U(30)
+#define AS_TRANSCFG_R_ALLOCATE_MASK (GPU_U(0x1) << AS_TRANSCFG_R_ALLOCATE_SHIFT)
+#define AS_TRANSCFG_R_ALLOCATE_GET(reg_val) \
+ (((reg_val)&AS_TRANSCFG_R_ALLOCATE_MASK) >> AS_TRANSCFG_R_ALLOCATE_SHIFT)
+#define AS_TRANSCFG_R_ALLOCATE_SET(reg_val, value) \
+ (~(~(reg_val) | AS_TRANSCFG_R_ALLOCATE_MASK) | \
+ (((value) << AS_TRANSCFG_R_ALLOCATE_SHIFT) & AS_TRANSCFG_R_ALLOCATE_MASK))
+
+#define AS_TRANSCFG_R_ALLOCATE_NO_READ_ALLOCATE 0x0
+#define AS_TRANSCFG_R_ALLOCATE_READ_ALLOCATE 0x1
+
+/* AS_COMMAND register */
+#define AS_COMMAND_COMMAND_SHIFT GPU_U(0)
+#define AS_COMMAND_COMMAND_MASK (GPU_U(0xFF) << AS_COMMAND_COMMAND_SHIFT)
+#define AS_COMMAND_COMMAND_GET(reg_val) \
+ (((reg_val)&AS_COMMAND_COMMAND_MASK) >> AS_COMMAND_COMMAND_SHIFT)
+#define AS_COMMAND_COMMAND_SET(reg_val, value) \
+ (~(~(reg_val) | AS_COMMAND_COMMAND_MASK) | \
+ (((value) << AS_COMMAND_COMMAND_SHIFT) & AS_COMMAND_COMMAND_MASK))
+
+/* AS_COMMAND_COMMAND values */
+#define AS_COMMAND_COMMAND_NOP 0x0
+#define AS_COMMAND_COMMAND_UPDATE 0x1
+#define AS_COMMAND_COMMAND_LOCK 0x2
+#define AS_COMMAND_COMMAND_UNLOCK 0x3
+#define AS_COMMAND_COMMAND_FLUSH_PT 0x4
+#define AS_COMMAND_COMMAND_FLUSH_MEM 0x5
+
+/* AS_LOCKADDR register
+ * LOCKADDR_SIZE: bits [5:0], FLUSH_SKIP_LEVELS: bits [9:6],
+ * LOCKADDR_BASE: bits [63:12] (64-bit register).
+ */
+#define AS_LOCKADDR_LOCKADDR_SIZE_SHIFT GPU_U(0)
+#define AS_LOCKADDR_LOCKADDR_SIZE_MASK (GPU_U(0x3F) << AS_LOCKADDR_LOCKADDR_SIZE_SHIFT)
+#define AS_LOCKADDR_LOCKADDR_SIZE_GET(reg_val) \
+ (((reg_val)&AS_LOCKADDR_LOCKADDR_SIZE_MASK) >> AS_LOCKADDR_LOCKADDR_SIZE_SHIFT)
+#define AS_LOCKADDR_LOCKADDR_SIZE_SET(reg_val, value) \
+ (~(~(reg_val) | AS_LOCKADDR_LOCKADDR_SIZE_MASK) | \
+ (((value) << AS_LOCKADDR_LOCKADDR_SIZE_SHIFT) & AS_LOCKADDR_LOCKADDR_SIZE_MASK))
+#define AS_LOCKADDR_FLUSH_SKIP_LEVELS_SHIFT GPU_U(6)
+#define AS_LOCKADDR_FLUSH_SKIP_LEVELS_MASK (GPU_U(0xF) << AS_LOCKADDR_FLUSH_SKIP_LEVELS_SHIFT)
+#define AS_LOCKADDR_FLUSH_SKIP_LEVELS_GET(reg_val) \
+ (((reg_val)&AS_LOCKADDR_FLUSH_SKIP_LEVELS_MASK) >> AS_LOCKADDR_FLUSH_SKIP_LEVELS_SHIFT)
+#define AS_LOCKADDR_FLUSH_SKIP_LEVELS_SET(reg_val, value) \
+ (~(~(reg_val) | AS_LOCKADDR_FLUSH_SKIP_LEVELS_MASK) | \
+ (((value) << AS_LOCKADDR_FLUSH_SKIP_LEVELS_SHIFT) & AS_LOCKADDR_FLUSH_SKIP_LEVELS_MASK))
+#define AS_LOCKADDR_LOCKADDR_BASE_SHIFT GPU_U(12)
+#define AS_LOCKADDR_LOCKADDR_BASE_MASK (GPU_ULL(0xFFFFFFFFFFFFF) << AS_LOCKADDR_LOCKADDR_BASE_SHIFT)
+#define AS_LOCKADDR_LOCKADDR_BASE_GET(reg_val) \
+ (((reg_val)&AS_LOCKADDR_LOCKADDR_BASE_MASK) >> AS_LOCKADDR_LOCKADDR_BASE_SHIFT)
+/* The uint64_t cast keeps the shift of 'value' in 64-bit arithmetic so
+ * high base-address bits are not lost on 32-bit operands.
+ */
+#define AS_LOCKADDR_LOCKADDR_BASE_SET(reg_val, value) \
+ (~(~(reg_val) | AS_LOCKADDR_LOCKADDR_BASE_MASK) | \
+ (((uint64_t)(value) << AS_LOCKADDR_LOCKADDR_BASE_SHIFT) & \
+ AS_LOCKADDR_LOCKADDR_BASE_MASK))
+
+/* AS_MEMATTR_ATTRIBUTE0 register */
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_W_SHIFT GPU_U(0)
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_W_MASK (GPU_U(0x1) << AS_MEMATTR_ATTRIBUTE0_ALLOC_W_SHIFT)
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_W_GET(reg_val) \
+ (((reg_val)&AS_MEMATTR_ATTRIBUTE0_ALLOC_W_MASK) >> AS_MEMATTR_ATTRIBUTE0_ALLOC_W_SHIFT)
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_W_SET(reg_val, value) \
+ (~(~(reg_val) | AS_MEMATTR_ATTRIBUTE0_ALLOC_W_MASK) | \
+ (((value) << AS_MEMATTR_ATTRIBUTE0_ALLOC_W_SHIFT) & AS_MEMATTR_ATTRIBUTE0_ALLOC_W_MASK))
+
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_W_NOALLOCATE 0x0
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_W_ALLOCATE 0x1
+
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_R_SHIFT GPU_U(1)
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_R_MASK (GPU_U(0x1) << AS_MEMATTR_ATTRIBUTE0_ALLOC_R_SHIFT)
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_R_GET(reg_val) \
+ (((reg_val)&AS_MEMATTR_ATTRIBUTE0_ALLOC_R_MASK) >> AS_MEMATTR_ATTRIBUTE0_ALLOC_R_SHIFT)
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_R_SET(reg_val, value) \
+ (~(~(reg_val) | AS_MEMATTR_ATTRIBUTE0_ALLOC_R_MASK) | \
+ (((value) << AS_MEMATTR_ATTRIBUTE0_ALLOC_R_SHIFT) & AS_MEMATTR_ATTRIBUTE0_ALLOC_R_MASK))
+
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_R_NOALLOCATE 0x0
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_R_ALLOCATE 0x1
+
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_SEL_SHIFT GPU_U(2)
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_SEL_MASK (GPU_U(0x3) << AS_MEMATTR_ATTRIBUTE0_ALLOC_SEL_SHIFT)
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_SEL_GET(reg_val) \
+ (((reg_val)&AS_MEMATTR_ATTRIBUTE0_ALLOC_SEL_MASK) >> AS_MEMATTR_ATTRIBUTE0_ALLOC_SEL_SHIFT)
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_SEL_SET(reg_val, value) \
+ (~(~(reg_val) | AS_MEMATTR_ATTRIBUTE0_ALLOC_SEL_MASK) | \
+ (((value) << AS_MEMATTR_ATTRIBUTE0_ALLOC_SEL_SHIFT) & \
+ AS_MEMATTR_ATTRIBUTE0_ALLOC_SEL_MASK))
+
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_SEL_IMPL 0x2
+#define AS_MEMATTR_ATTRIBUTE0_ALLOC_SEL_ALLOC 0x3
+
+/* NOTE: NO_IDENTITY_COHERENCY and IDENTITY_COHERENCY below both occupy
+ * bits [5:4]; they are alternative encodings of the same field
+ * (presumably selected by the translation mode — confirm against the
+ * hardware register documentation).
+ */
+#define AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_SHIFT GPU_U(4)
+#define AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_MASK \
+ (GPU_U(0x3) << AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_SHIFT)
+#define AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_GET(reg_val) \
+ (((reg_val)&AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_MASK) >> \
+ AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_SHIFT)
+#define AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_SET(reg_val, value) \
+ (~(~(reg_val) | AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_MASK) | \
+ (((value) << AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_SHIFT) & \
+ AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_MASK))
+
+#define AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_MIDGARD_INNER_DOMAIN 0x0
+#define AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_CPU_INNER_DOMAIN 0x1
+#define AS_MEMATTR_ATTRIBUTE0_NO_IDENTITY_COHERENCY_CPU_INNER_DOMAIN_SHADER_COH 0x2
+
+#define AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_SHIFT GPU_U(6)
+#define AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_MASK \
+ (GPU_U(0x3) << AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_SHIFT)
+#define AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_GET(reg_val) \
+ (((reg_val)&AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_MASK) >> \
+ AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_SHIFT)
+#define AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_SET(reg_val, value) \
+ (~(~(reg_val) | AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_MASK) | \
+ (((value) << AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_SHIFT) & \
+ AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_MASK))
+
+#define AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_NON_CACHEABLE 0x1
+#define AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_WRITE_BACK 0x2
+#define AS_MEMATTR_ATTRIBUTE0_MEMORY_TYPE_FAULT 0x3
+
+#define AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_SHIFT GPU_U(4)
+#define AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_MASK \
+ (GPU_U(0x3) << AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_SHIFT)
+#define AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_GET(reg_val) \
+ (((reg_val)&AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_MASK) >> \
+ AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_SHIFT)
+#define AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_SET(reg_val, value) \
+ (~(~(reg_val) | AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_MASK) | \
+ (((value) << AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_SHIFT) & \
+ AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_MASK))
+
+#define AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_NON_SHAREABLE 0x0
+#define AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_INTERNAL_SHAREABLE 0x1
+#define AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_OUTER_SHAREABLE 0x2
+#define AS_MEMATTR_ATTRIBUTE0_IDENTITY_COHERENCY_INNER_SHAREABLE 0x3
+
+/* L2_MMU_CONFIG register */
+#define L2_MMU_CONFIG_ALLOW_SNOOP_DISPARITY_SHIFT (23)
+#define L2_MMU_CONFIG_ALLOW_SNOOP_DISPARITY (0x1 << L2_MMU_CONFIG_ALLOW_SNOOP_DISPARITY_SHIFT)
+/* End L2_MMU_CONFIG register */
+
+/* THREAD_* registers */
+
+/* THREAD_FEATURES IMPLEMENTATION_TECHNOLOGY values */
+#define THREAD_FEATURES_IMPLEMENTATION_TECHNOLOGY_NOT_SPECIFIED 0U
+#define THREAD_FEATURES_IMPLEMENTATION_TECHNOLOGY_SILICON 1U
+#define THREAD_FEATURES_IMPLEMENTATION_TECHNOLOGY_FPGA 2U
+#define THREAD_FEATURES_IMPLEMENTATION_TECHNOLOGY_SOFTWARE 3U
+
+/* End THREAD_* registers */
+
+/* SHADER_CONFIG register (single-bit feature flags) */
+#define SC_LS_ALLOW_ATTR_TYPES (1ul << 16)
+#define SC_TLS_HASH_ENABLE (1ul << 17)
+#define SC_LS_ATTR_CHECK_DISABLE (1ul << 18)
+#define SC_VAR_ALGORITHM (1ul << 29)
+/* End SHADER_CONFIG register */
+
+/* TILER_CONFIG register */
+#define TC_CLOCK_GATE_OVERRIDE (1ul << 0)
+/* End TILER_CONFIG register */
+
+/* L2_CONFIG register
+ * NOTE: HASH (bits [31:24]) and L2_SLICE_HASH_ENABLE (bit 24) overlap;
+ * presumably which interpretation applies depends on the GPU variant —
+ * confirm against the hardware documentation.
+ */
+#define L2_CONFIG_SIZE_SHIFT 16
+#define L2_CONFIG_SIZE_MASK (0xFFul << L2_CONFIG_SIZE_SHIFT)
+#define L2_CONFIG_HASH_SHIFT 24
+#define L2_CONFIG_HASH_MASK (0xFFul << L2_CONFIG_HASH_SHIFT)
+#define L2_CONFIG_L2_SLICE_HASH_ENABLE_SHIFT 24
+#define L2_CONFIG_L2_SLICE_HASH_ENABLE_MASK (1ul << L2_CONFIG_L2_SLICE_HASH_ENABLE_SHIFT)
+/* Aliases for _ASN_HASH_ENABLE_ */
+#define L2_CONFIG_ASN_HASH_ENABLE_SHIFT L2_CONFIG_L2_SLICE_HASH_ENABLE_SHIFT
+#define L2_CONFIG_ASN_HASH_ENABLE_MASK L2_CONFIG_L2_SLICE_HASH_ENABLE_MASK
+
+/* End L2_CONFIG register */
+
+/* AMBA_FEATURES register (read-only capability bits).
+ * Note these SET macros use the (reg_val) & ~MASK form rather than the
+ * ~(~(reg_val) | MASK) form used earlier in this file; both clear the
+ * field before ORing in the new value.
+ */
+#define AMBA_FEATURES_ACE_LITE_SHIFT GPU_U(0)
+#define AMBA_FEATURES_ACE_LITE_MASK (GPU_U(0x1) << AMBA_FEATURES_ACE_LITE_SHIFT)
+#define AMBA_FEATURES_ACE_LITE_GET(reg_val) \
+ (((reg_val)&AMBA_FEATURES_ACE_LITE_MASK) >> AMBA_FEATURES_ACE_LITE_SHIFT)
+#define AMBA_FEATURES_ACE_LITE_SET(reg_val, value) \
+ (((reg_val) & ~AMBA_FEATURES_ACE_LITE_MASK) | \
+ (((value) << AMBA_FEATURES_ACE_LITE_SHIFT) & AMBA_FEATURES_ACE_LITE_MASK))
+#define AMBA_FEATURES_ACE_SHIFT GPU_U(1)
+#define AMBA_FEATURES_ACE_MASK (GPU_U(0x1) << AMBA_FEATURES_ACE_SHIFT)
+#define AMBA_FEATURES_ACE_GET(reg_val) \
+ (((reg_val)&AMBA_FEATURES_ACE_MASK) >> AMBA_FEATURES_ACE_SHIFT)
+#define AMBA_FEATURES_ACE_SET(reg_val, value) \
+ (((reg_val) & ~AMBA_FEATURES_ACE_MASK) | \
+ (((value) << AMBA_FEATURES_ACE_SHIFT) & AMBA_FEATURES_ACE_MASK))
+#define AMBA_FEATURES_SHAREABLE_CACHE_SUPPORT_SHIFT GPU_U(5)
+#define AMBA_FEATURES_SHAREABLE_CACHE_SUPPORT_MASK \
+ (GPU_U(0x1) << AMBA_FEATURES_SHAREABLE_CACHE_SUPPORT_SHIFT)
+#define AMBA_FEATURES_SHAREABLE_CACHE_SUPPORT_GET(reg_val) \
+ (((reg_val)&AMBA_FEATURES_SHAREABLE_CACHE_SUPPORT_MASK) >> \
+ AMBA_FEATURES_SHAREABLE_CACHE_SUPPORT_SHIFT)
+#define AMBA_FEATURES_SHAREABLE_CACHE_SUPPORT_SET(reg_val, value) \
+ (((reg_val) & ~AMBA_FEATURES_SHAREABLE_CACHE_SUPPORT_MASK) | \
+ (((value) << AMBA_FEATURES_SHAREABLE_CACHE_SUPPORT_SHIFT) & \
+ AMBA_FEATURES_SHAREABLE_CACHE_SUPPORT_MASK))
+#define AMBA_FEATURES_INVALIDATE_HINT_SHIFT GPU_U(6)
+#define AMBA_FEATURES_INVALIDATE_HINT_MASK (GPU_U(0x1) << AMBA_FEATURES_INVALIDATE_HINT_SHIFT)
+#define AMBA_FEATURES_INVALIDATE_HINT_GET(reg_val) \
+ (((reg_val)&AMBA_FEATURES_INVALIDATE_HINT_MASK) >> AMBA_FEATURES_INVALIDATE_HINT_SHIFT)
+#define AMBA_FEATURES_INVALIDATE_HINT_SET(reg_val, value) \
+ (((reg_val) & ~AMBA_FEATURES_INVALIDATE_HINT_MASK) | \
+ (((value) << AMBA_FEATURES_INVALIDATE_HINT_SHIFT) & AMBA_FEATURES_INVALIDATE_HINT_MASK))
+
+/* AMBA_ENABLE register */
+#define AMBA_ENABLE_COHERENCY_PROTOCOL_SHIFT GPU_U(0)
+#define AMBA_ENABLE_COHERENCY_PROTOCOL_MASK (GPU_U(0x1F) << AMBA_ENABLE_COHERENCY_PROTOCOL_SHIFT)
+#define AMBA_ENABLE_COHERENCY_PROTOCOL_GET(reg_val) \
+ (((reg_val)&AMBA_ENABLE_COHERENCY_PROTOCOL_MASK) >> AMBA_ENABLE_COHERENCY_PROTOCOL_SHIFT)
+#define AMBA_ENABLE_COHERENCY_PROTOCOL_SET(reg_val, value) \
+ (((reg_val) & ~AMBA_ENABLE_COHERENCY_PROTOCOL_MASK) | \
+ (((value) << AMBA_ENABLE_COHERENCY_PROTOCOL_SHIFT) & \
+ AMBA_ENABLE_COHERENCY_PROTOCOL_MASK))
+/* AMBA_ENABLE_coherency_protocol values */
+#define AMBA_ENABLE_COHERENCY_PROTOCOL_ACE_LITE 0x0
+#define AMBA_ENABLE_COHERENCY_PROTOCOL_ACE 0x1
+#define AMBA_ENABLE_COHERENCY_PROTOCOL_NO_COHERENCY 0x1F
+/* End of AMBA_ENABLE_coherency_protocol values */
+#define AMBA_ENABLE_SHAREABLE_CACHE_SUPPORT_SHIFT GPU_U(5)
+#define AMBA_ENABLE_SHAREABLE_CACHE_SUPPORT_MASK \
+ (GPU_U(0x1) << AMBA_ENABLE_SHAREABLE_CACHE_SUPPORT_SHIFT)
+#define AMBA_ENABLE_SHAREABLE_CACHE_SUPPORT_GET(reg_val) \
+ (((reg_val)&AMBA_ENABLE_SHAREABLE_CACHE_SUPPORT_MASK) >> \
+ AMBA_ENABLE_SHAREABLE_CACHE_SUPPORT_SHIFT)
+#define AMBA_ENABLE_SHAREABLE_CACHE_SUPPORT_SET(reg_val, value) \
+ (((reg_val) & ~AMBA_ENABLE_SHAREABLE_CACHE_SUPPORT_MASK) | \
+ (((value) << AMBA_ENABLE_SHAREABLE_CACHE_SUPPORT_SHIFT) & \
+ AMBA_ENABLE_SHAREABLE_CACHE_SUPPORT_MASK))
+#define AMBA_ENABLE_INVALIDATE_HINT_SHIFT GPU_U(6)
+#define AMBA_ENABLE_INVALIDATE_HINT_MASK (GPU_U(0x1) << AMBA_ENABLE_INVALIDATE_HINT_SHIFT)
+#define AMBA_ENABLE_INVALIDATE_HINT_GET(reg_val) \
+ (((reg_val)&AMBA_ENABLE_INVALIDATE_HINT_MASK) >> AMBA_ENABLE_INVALIDATE_HINT_SHIFT)
+#define AMBA_ENABLE_INVALIDATE_HINT_SET(reg_val, value) \
+ (((reg_val) & ~AMBA_ENABLE_INVALIDATE_HINT_MASK) | \
+ (((value) << AMBA_ENABLE_INVALIDATE_HINT_SHIFT) & AMBA_ENABLE_INVALIDATE_HINT_MASK))
+
+/* SYSC_ALLOC read IDs (requester identifiers for read allocation policy) */
+#define SYSC_ALLOC_ID_R_OTHER 0x00
+#define SYSC_ALLOC_ID_R_CSF 0x02
+#define SYSC_ALLOC_ID_R_MMU 0x04
+#define SYSC_ALLOC_ID_R_TILER_VERT 0x08
+#define SYSC_ALLOC_ID_R_TILER_PTR 0x09
+#define SYSC_ALLOC_ID_R_TILER_INDEX 0x0A
+#define SYSC_ALLOC_ID_R_TILER_OTHER 0x0B
+#define SYSC_ALLOC_ID_R_IC 0x10
+#define SYSC_ALLOC_ID_R_ATTR 0x11
+#define SYSC_ALLOC_ID_R_SCM 0x12
+#define SYSC_ALLOC_ID_R_FSDC 0x13
+#define SYSC_ALLOC_ID_R_VL 0x14
+#define SYSC_ALLOC_ID_R_PLR 0x15
+#define SYSC_ALLOC_ID_R_TEX 0x18
+#define SYSC_ALLOC_ID_R_LSC 0x1c
+
+/* SYSC_ALLOC write IDs (requester identifiers for write allocation policy) */
+#define SYSC_ALLOC_ID_W_OTHER 0x00
+#define SYSC_ALLOC_ID_W_CSF 0x02
+#define SYSC_ALLOC_ID_W_PCB 0x07
+#define SYSC_ALLOC_ID_W_TILER_PTR 0x09
+#define SYSC_ALLOC_ID_W_TILER_VERT_PLIST 0x0A
+#define SYSC_ALLOC_ID_W_TILER_OTHER 0x0B
+#define SYSC_ALLOC_ID_W_L2_EVICT 0x0C
+#define SYSC_ALLOC_ID_W_L2_FLUSH 0x0D
+#define SYSC_ALLOC_ID_W_TIB_COLOR 0x10
+#define SYSC_ALLOC_ID_W_TIB_COLOR_AFBCH 0x11
+#define SYSC_ALLOC_ID_W_TIB_COLOR_AFBCB 0x12
+#define SYSC_ALLOC_ID_W_TIB_CRC 0x13
+#define SYSC_ALLOC_ID_W_TIB_DS 0x14
+#define SYSC_ALLOC_ID_W_TIB_DS_AFBCH 0x15
+#define SYSC_ALLOC_ID_W_TIB_DS_AFBCB 0x16
+#define SYSC_ALLOC_ID_W_LSC 0x1C
+
+/* SYSC_ALLOC values (allocation policy encodings written into the
+ * per-ID nibble fields below)
+ */
+#define SYSC_ALLOC_L2_ALLOC 0x0
+#define SYSC_ALLOC_NEVER_ALLOC 0x2
+#define SYSC_ALLOC_ALWAYS_ALLOC 0x3
+#define SYSC_ALLOC_PTL_ALLOC 0x4
+#define SYSC_ALLOC_L2_PTL_ALLOC 0x5
+
+/* SYSC_ALLOC register.
+ * Each 32-bit SYSC_ALLOC register packs four read/write nibble pairs:
+ * R_SYSC_ALLOC0 [3:0], W_SYSC_ALLOC0 [7:4], R_SYSC_ALLOC1 [11:8],
+ * W_SYSC_ALLOC1 [15:12], and so on up to W_SYSC_ALLOC3 [31:28].
+ */
+#define SYSC_ALLOC_R_SYSC_ALLOC0_SHIFT (0)
+#define SYSC_ALLOC_R_SYSC_ALLOC0_MASK ((0xFU) << SYSC_ALLOC_R_SYSC_ALLOC0_SHIFT)
+#define SYSC_ALLOC_R_SYSC_ALLOC0_GET(reg_val) \
+ (((reg_val)&SYSC_ALLOC_R_SYSC_ALLOC0_MASK) >> SYSC_ALLOC_R_SYSC_ALLOC0_SHIFT)
+#define SYSC_ALLOC_R_SYSC_ALLOC0_SET(reg_val, value) \
+ (((reg_val) & ~SYSC_ALLOC_R_SYSC_ALLOC0_MASK) | \
+ (((value) << SYSC_ALLOC_R_SYSC_ALLOC0_SHIFT) & SYSC_ALLOC_R_SYSC_ALLOC0_MASK))
+/* End of SYSC_ALLOC_R_SYSC_ALLOC0 values */
+#define SYSC_ALLOC_W_SYSC_ALLOC0_SHIFT (4)
+#define SYSC_ALLOC_W_SYSC_ALLOC0_MASK ((0xFU) << SYSC_ALLOC_W_SYSC_ALLOC0_SHIFT)
+#define SYSC_ALLOC_W_SYSC_ALLOC0_GET(reg_val) \
+ (((reg_val)&SYSC_ALLOC_W_SYSC_ALLOC0_MASK) >> SYSC_ALLOC_W_SYSC_ALLOC0_SHIFT)
+#define SYSC_ALLOC_W_SYSC_ALLOC0_SET(reg_val, value) \
+ (((reg_val) & ~SYSC_ALLOC_W_SYSC_ALLOC0_MASK) | \
+ (((value) << SYSC_ALLOC_W_SYSC_ALLOC0_SHIFT) & SYSC_ALLOC_W_SYSC_ALLOC0_MASK))
+/* End of SYSC_ALLOC_W_SYSC_ALLOC0 values */
+#define SYSC_ALLOC_R_SYSC_ALLOC1_SHIFT (8)
+#define SYSC_ALLOC_R_SYSC_ALLOC1_MASK ((0xFU) << SYSC_ALLOC_R_SYSC_ALLOC1_SHIFT)
+#define SYSC_ALLOC_R_SYSC_ALLOC1_GET(reg_val) \
+ (((reg_val)&SYSC_ALLOC_R_SYSC_ALLOC1_MASK) >> SYSC_ALLOC_R_SYSC_ALLOC1_SHIFT)
+#define SYSC_ALLOC_R_SYSC_ALLOC1_SET(reg_val, value) \
+ (((reg_val) & ~SYSC_ALLOC_R_SYSC_ALLOC1_MASK) | \
+ (((value) << SYSC_ALLOC_R_SYSC_ALLOC1_SHIFT) & SYSC_ALLOC_R_SYSC_ALLOC1_MASK))
+/* End of SYSC_ALLOC_R_SYSC_ALLOC1 values */
+#define SYSC_ALLOC_W_SYSC_ALLOC1_SHIFT (12)
+#define SYSC_ALLOC_W_SYSC_ALLOC1_MASK ((0xFU) << SYSC_ALLOC_W_SYSC_ALLOC1_SHIFT)
+#define SYSC_ALLOC_W_SYSC_ALLOC1_GET(reg_val) \
+ (((reg_val)&SYSC_ALLOC_W_SYSC_ALLOC1_MASK) >> SYSC_ALLOC_W_SYSC_ALLOC1_SHIFT)
+#define SYSC_ALLOC_W_SYSC_ALLOC1_SET(reg_val, value) \
+ (((reg_val) & ~SYSC_ALLOC_W_SYSC_ALLOC1_MASK) | \
+ (((value) << SYSC_ALLOC_W_SYSC_ALLOC1_SHIFT) & SYSC_ALLOC_W_SYSC_ALLOC1_MASK))
+/* End of SYSC_ALLOC_W_SYSC_ALLOC1 values */
+#define SYSC_ALLOC_R_SYSC_ALLOC2_SHIFT (16)
+#define SYSC_ALLOC_R_SYSC_ALLOC2_MASK ((0xFU) << SYSC_ALLOC_R_SYSC_ALLOC2_SHIFT)
+#define SYSC_ALLOC_R_SYSC_ALLOC2_GET(reg_val) \
+ (((reg_val)&SYSC_ALLOC_R_SYSC_ALLOC2_MASK) >> SYSC_ALLOC_R_SYSC_ALLOC2_SHIFT)
+#define SYSC_ALLOC_R_SYSC_ALLOC2_SET(reg_val, value) \
+ (((reg_val) & ~SYSC_ALLOC_R_SYSC_ALLOC2_MASK) | \
+ (((value) << SYSC_ALLOC_R_SYSC_ALLOC2_SHIFT) & SYSC_ALLOC_R_SYSC_ALLOC2_MASK))
+/* End of SYSC_ALLOC_R_SYSC_ALLOC2 values */
+#define SYSC_ALLOC_W_SYSC_ALLOC2_SHIFT (20)
+#define SYSC_ALLOC_W_SYSC_ALLOC2_MASK ((0xFU) << SYSC_ALLOC_W_SYSC_ALLOC2_SHIFT)
+#define SYSC_ALLOC_W_SYSC_ALLOC2_GET(reg_val) \
+ (((reg_val)&SYSC_ALLOC_W_SYSC_ALLOC2_MASK) >> SYSC_ALLOC_W_SYSC_ALLOC2_SHIFT)
+#define SYSC_ALLOC_W_SYSC_ALLOC2_SET(reg_val, value) \
+ (((reg_val) & ~SYSC_ALLOC_W_SYSC_ALLOC2_MASK) | \
+ (((value) << SYSC_ALLOC_W_SYSC_ALLOC2_SHIFT) & SYSC_ALLOC_W_SYSC_ALLOC2_MASK))
+/* End of SYSC_ALLOC_W_SYSC_ALLOC2 values */
+#define SYSC_ALLOC_R_SYSC_ALLOC3_SHIFT (24)
+#define SYSC_ALLOC_R_SYSC_ALLOC3_MASK ((0xFU) << SYSC_ALLOC_R_SYSC_ALLOC3_SHIFT)
+#define SYSC_ALLOC_R_SYSC_ALLOC3_GET(reg_val) \
+ (((reg_val)&SYSC_ALLOC_R_SYSC_ALLOC3_MASK) >> SYSC_ALLOC_R_SYSC_ALLOC3_SHIFT)
+#define SYSC_ALLOC_R_SYSC_ALLOC3_SET(reg_val, value) \
+ (((reg_val) & ~SYSC_ALLOC_R_SYSC_ALLOC3_MASK) | \
+ (((value) << SYSC_ALLOC_R_SYSC_ALLOC3_SHIFT) & SYSC_ALLOC_R_SYSC_ALLOC3_MASK))
+/* End of SYSC_ALLOC_R_SYSC_ALLOC3 values */
+#define SYSC_ALLOC_W_SYSC_ALLOC3_SHIFT (28)
+#define SYSC_ALLOC_W_SYSC_ALLOC3_MASK ((0xFU) << SYSC_ALLOC_W_SYSC_ALLOC3_SHIFT)
+#define SYSC_ALLOC_W_SYSC_ALLOC3_GET(reg_val) \
+ (((reg_val)&SYSC_ALLOC_W_SYSC_ALLOC3_MASK) >> SYSC_ALLOC_W_SYSC_ALLOC3_SHIFT)
+#define SYSC_ALLOC_W_SYSC_ALLOC3_SET(reg_val, value) \
+ (((reg_val) & ~SYSC_ALLOC_W_SYSC_ALLOC3_MASK) | \
+ (((value) << SYSC_ALLOC_W_SYSC_ALLOC3_SHIFT) & SYSC_ALLOC_W_SYSC_ALLOC3_MASK))
+/* End of SYSC_ALLOC_W_SYSC_ALLOC3 values */
+
+/* IDVS_GROUP register */
+#define IDVS_GROUP_SIZE_SHIFT (16)
+#define IDVS_GROUP_MAX_SIZE (0x3F)
+
+/* Include POWER_CHANGED_SINGLE in debug builds for use in irq latency test.
+ * This intentionally re-defines the GPU_IRQ_REG_ALL value set earlier in
+ * this file, hence the explicit #undef first.
+ */
+#ifdef CONFIG_MALI_DEBUG
+#undef GPU_IRQ_REG_ALL
+#define GPU_IRQ_REG_ALL (GPU_IRQ_REG_COMMON | POWER_CHANGED_SINGLE)
+#endif /* CONFIG_MALI_DEBUG */
+
+#endif /* _MALI_KBASE_HW_ACCESS_REGMAP_H_ */